diff --git a/README.md b/README.md
index e7f62b4b..179c1943 100644
--- a/README.md
+++ b/README.md
@@ -110,7 +110,7 @@ As mentioned above, VulcanizeDB's processes can be split into three categories:
### Data syncing
To provide data for transformations, raw Ethereum data must first be synced into VulcanizeDB.
-This is accomplished through the use of the `headerSync`, `fullSync`, or `coldImport` commands.
+This is accomplished through the use of the `headerSync` command.
These commands are described in detail [here](documentation/data-syncing.md).
### Data transformation
diff --git a/cmd/coldImport.go b/cmd/coldImport.go
deleted file mode 100644
index 4e8a8a44..00000000
--- a/cmd/coldImport.go
+++ /dev/null
@@ -1,97 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package cmd
-
-import (
- log "github.com/sirupsen/logrus"
- "github.com/spf13/cobra"
-
- "github.com/vulcanize/vulcanizedb/pkg/eth/cold_import"
- "github.com/vulcanize/vulcanizedb/pkg/eth/converters/cold_db"
- vulcCommon "github.com/vulcanize/vulcanizedb/pkg/eth/converters/common"
- "github.com/vulcanize/vulcanizedb/pkg/eth/crypto"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/ethereum"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
- "github.com/vulcanize/vulcanizedb/pkg/fs"
- "github.com/vulcanize/vulcanizedb/utils"
-)
-
-var coldImportCmd = &cobra.Command{
- Use: "coldImport",
- Short: "Sync vulcanize from a cold instance of LevelDB.",
- Long: `Populate core vulcanize db data directly out of LevelDB, rather than over rpc calls. For example:
-
-./vulcanizedb coldImport -s 0 -e 5000000
-
-Geth must be synced over all of the desired blocks and must not be running in order to execute this command.`,
- Run: func(cmd *cobra.Command, args []string) {
- subCommand = cmd.CalledAs()
- logWithCommand = *log.WithField("SubCommand", subCommand)
- coldImport()
- },
-}
-
-func init() {
- rootCmd.AddCommand(coldImportCmd)
- coldImportCmd.Flags().Int64VarP(&startingBlockNumber, "starting-block-number", "s", 0, "BlockNumber for first block to cold import.")
- coldImportCmd.Flags().Int64VarP(&endingBlockNumber, "ending-block-number", "e", 5500000, "BlockNumber for last block to cold import.")
- coldImportCmd.Flags().BoolVarP(&syncAll, "all", "a", false, "Option to sync all missing blocks.")
-}
-
-func coldImport() {
- // init eth db
- ethDBConfig := ethereum.CreateDatabaseConfig(ethereum.Level, levelDbPath)
- ethDB, err := ethereum.CreateDatabase(ethDBConfig)
- if err != nil {
- logWithCommand.Fatal("Error connecting to ethereum db: ", err)
- }
- mostRecentBlockNumberInDb := ethDB.GetHeadBlockNumber()
- if syncAll {
- startingBlockNumber = 0
- endingBlockNumber = mostRecentBlockNumberInDb
- }
- if endingBlockNumber < startingBlockNumber {
- logWithCommand.Fatal("Ending block number must be greater than starting block number for cold import.")
- }
- if endingBlockNumber > mostRecentBlockNumberInDb {
- logWithCommand.Fatal("Ending block number is greater than most recent block in db: ", mostRecentBlockNumberInDb)
- }
-
- // init pg db
- genesisBlock := ethDB.GetBlockHash(0)
- reader := fs.FsReader{}
- parser := crypto.EthPublicKeyParser{}
- nodeBuilder := cold_import.NewColdImportNodeBuilder(reader, parser)
- coldNode, err := nodeBuilder.GetNode(genesisBlock, levelDbPath)
- if err != nil {
- logWithCommand.Fatal("Error getting node: ", err)
- }
- pgDB := utils.LoadPostgres(databaseConfig, coldNode)
-
- // init cold importer deps
- blockRepository := repositories.NewBlockRepository(&pgDB)
- receiptRepository := repositories.FullSyncReceiptRepository{DB: &pgDB}
- transactionConverter := cold_db.NewColdDbTransactionConverter()
- blockConverter := vulcCommon.NewBlockConverter(transactionConverter)
-
- // init and execute cold importer
- coldImporter := cold_import.NewColdImporter(ethDB, blockRepository, receiptRepository, blockConverter)
- err = coldImporter.Execute(startingBlockNumber, endingBlockNumber, coldNode.ID)
- if err != nil {
- logWithCommand.Fatal("Error executing cold import: ", err)
- }
-}
diff --git a/cmd/contractWatcher.go b/cmd/contractWatcher.go
index ce46092e..dd7759a0 100644
--- a/cmd/contractWatcher.go
+++ b/cmd/contractWatcher.go
@@ -25,7 +25,6 @@ import (
st "github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
"github.com/vulcanize/vulcanizedb/pkg/config"
- ft "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/full/transformer"
ht "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/header/transformer"
"github.com/vulcanize/vulcanizedb/utils"
)
@@ -99,14 +98,7 @@ func contractWatcher() {
var t st.ContractTransformer
con := config.ContractConfig{}
con.PrepConfig()
- switch mode {
- case "header":
- t = ht.NewTransformer(con, blockChain, &db)
- case "full":
- t = ft.NewTransformer(con, blockChain, &db)
- default:
- logWithCommand.Fatal("Invalid mode")
- }
+ t = ht.NewTransformer(con, blockChain, &db)
err := t.Init()
if err != nil {
@@ -123,5 +115,4 @@ func contractWatcher() {
func init() {
rootCmd.AddCommand(contractWatcherCmd)
- contractWatcherCmd.Flags().StringVarP(&mode, "mode", "o", "header", "'header' or 'full' mode to work with either header synced or fully synced vDB (default is header)")
}
diff --git a/cmd/fullSync.go b/cmd/fullSync.go
deleted file mode 100644
index 379ba74d..00000000
--- a/cmd/fullSync.go
+++ /dev/null
@@ -1,105 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package cmd
-
-import (
- "time"
-
- log "github.com/sirupsen/logrus"
- "github.com/spf13/cobra"
-
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
- "github.com/vulcanize/vulcanizedb/pkg/eth/history"
- "github.com/vulcanize/vulcanizedb/utils"
-)
-
-// fullSyncCmd represents the fullSync command
-var fullSyncCmd = &cobra.Command{
- Use: "fullSync",
- Short: "Syncs VulcanizeDB with local ethereum node",
- Long: `Syncs VulcanizeDB with local ethereum node. Populates
-Postgres with blocks, transactions, receipts, and logs.
-
-./vulcanizedb fullSync --starting-block-number 0 --config public.toml
-
-Expects ethereum node to be running and requires a .toml config:
-
- [database]
- name = "vulcanize_public"
- hostname = "localhost"
- port = 5432
-
- [client]
- ipcPath = "/Users/user/Library/Ethereum/geth.ipc"
-`,
- Run: func(cmd *cobra.Command, args []string) {
- subCommand = cmd.CalledAs()
- logWithCommand = *log.WithField("SubCommand", subCommand)
- fullSync()
- },
-}
-
-func init() {
- rootCmd.AddCommand(fullSyncCmd)
- fullSyncCmd.Flags().Int64VarP(&startingBlockNumber, "starting-block-number", "s", 0, "Block number to start syncing from")
-}
-
-func backFillAllBlocks(blockchain core.BlockChain, blockRepository datastore.BlockRepository, missingBlocksPopulated chan int, startingBlockNumber int64) {
- populated, err := history.PopulateMissingBlocks(blockchain, blockRepository, startingBlockNumber)
- if err != nil {
- logWithCommand.Error("backfillAllBlocks: error in populateMissingBlocks: ", err)
- }
- missingBlocksPopulated <- populated
-}
-
-func fullSync() {
- ticker := time.NewTicker(pollingInterval)
- defer ticker.Stop()
-
- blockChain := getBlockChain()
- lastBlock, err := blockChain.LastBlock()
- if err != nil {
- logWithCommand.Error("fullSync: Error getting last block: ", err)
- }
- if lastBlock.Int64() == 0 {
- logWithCommand.Fatal("geth initial: state sync not finished")
- }
- if startingBlockNumber > lastBlock.Int64() {
- logWithCommand.Fatal("fullSync: starting block number > current block number")
- }
-
- db := utils.LoadPostgres(databaseConfig, blockChain.Node())
- blockRepository := repositories.NewBlockRepository(&db)
- validator := history.NewBlockValidator(blockChain, blockRepository, validationWindow)
- missingBlocksPopulated := make(chan int)
- go backFillAllBlocks(blockChain, blockRepository, missingBlocksPopulated, startingBlockNumber)
-
- for {
- select {
- case <-ticker.C:
- window, err := validator.ValidateBlocks()
- if err != nil {
- logWithCommand.Error("fullSync: error in validateBlocks: ", err)
- }
- logWithCommand.Debug(window.GetString())
- case <-missingBlocksPopulated:
- go backFillAllBlocks(blockChain, blockRepository, missingBlocksPopulated, startingBlockNumber)
- }
- }
-}
diff --git a/db/migrations/00001_create_blocks_table.sql b/db/migrations/00001_create_blocks_table.sql
deleted file mode 100644
index 9ea69e88..00000000
--- a/db/migrations/00001_create_blocks_table.sql
+++ /dev/null
@@ -1,23 +0,0 @@
--- +goose Up
-CREATE TABLE public.eth_blocks (
- id SERIAL PRIMARY KEY,
- difficulty BIGINT,
- extra_data VARCHAR,
- gas_limit BIGINT,
- gas_used BIGINT,
- hash VARCHAR(66),
- miner VARCHAR(42),
- nonce VARCHAR(20),
- "number" BIGINT,
- parent_hash VARCHAR(66),
- reward NUMERIC,
- uncles_reward NUMERIC,
- "size" VARCHAR,
- "time" BIGINT,
- is_final BOOLEAN,
- uncle_hash VARCHAR(66)
-);
-
-
--- +goose Down
-DROP TABLE public.eth_blocks;
\ No newline at end of file
diff --git a/db/migrations/00005_create_nodes_table.sql b/db/migrations/00001_create_nodes_table.sql
similarity index 91%
rename from db/migrations/00005_create_nodes_table.sql
rename to db/migrations/00001_create_nodes_table.sql
index c2c0c7b8..76db3d71 100644
--- a/db/migrations/00005_create_nodes_table.sql
+++ b/db/migrations/00001_create_nodes_table.sql
@@ -3,7 +3,7 @@ CREATE TABLE nodes (
id SERIAL PRIMARY KEY,
client_name VARCHAR,
genesis_block VARCHAR(66),
- network_id NUMERIC,
+ network_id VARCHAR,
node_id VARCHAR(128),
CONSTRAINT node_uc UNIQUE (genesis_block, network_id, node_id)
);
diff --git a/db/migrations/00013_add_address_table.sql b/db/migrations/00002_create_addresses_table.sql
similarity index 100%
rename from db/migrations/00013_add_address_table.sql
rename to db/migrations/00002_create_addresses_table.sql
diff --git a/db/migrations/00002_create_full_sync_transactions_table.sql b/db/migrations/00002_create_full_sync_transactions_table.sql
deleted file mode 100644
index e8c00187..00000000
--- a/db/migrations/00002_create_full_sync_transactions_table.sql
+++ /dev/null
@@ -1,18 +0,0 @@
--- +goose Up
-CREATE TABLE full_sync_transactions (
- id SERIAL PRIMARY KEY,
- block_id INTEGER NOT NULL REFERENCES eth_blocks(id) ON DELETE CASCADE,
- gas_limit NUMERIC,
- gas_price NUMERIC,
- hash VARCHAR(66),
- input_data BYTEA,
- nonce NUMERIC,
- raw BYTEA,
- tx_from VARCHAR(66),
- tx_index INTEGER,
- tx_to VARCHAR(66),
- "value" NUMERIC
-);
-
--- +goose Down
-DROP TABLE full_sync_transactions;
\ No newline at end of file
diff --git a/db/migrations/00003_add_block_index_to_blocks.sql b/db/migrations/00003_add_block_index_to_blocks.sql
deleted file mode 100644
index d9689c73..00000000
--- a/db/migrations/00003_add_block_index_to_blocks.sql
+++ /dev/null
@@ -1,6 +0,0 @@
--- +goose Up
-CREATE INDEX number_index ON eth_blocks (number);
-
-
--- +goose Down
-DROP INDEX number_index;
diff --git a/db/migrations/00023_create_headers_table.sql b/db/migrations/00003_create_headers_table.sql
similarity index 88%
rename from db/migrations/00023_create_headers_table.sql
rename to db/migrations/00003_create_headers_table.sql
index f58e8eb5..50616795 100644
--- a/db/migrations/00023_create_headers_table.sql
+++ b/db/migrations/00003_create_headers_table.sql
@@ -7,7 +7,7 @@ CREATE TABLE public.headers
raw JSONB,
block_timestamp NUMERIC,
check_count INTEGER NOT NULL DEFAULT 0,
- eth_node_id INTEGER NOT NULL REFERENCES eth_nodes (id) ON DELETE CASCADE,
+ node_id INTEGER NOT NULL REFERENCES nodes (id) ON DELETE CASCADE,
eth_node_fingerprint VARCHAR(128),
UNIQUE (block_number, hash, eth_node_fingerprint)
);
diff --git a/db/migrations/00024_create_checked_headers_table.sql b/db/migrations/00004_create_checked_headers_table.sql
similarity index 100%
rename from db/migrations/00024_create_checked_headers_table.sql
rename to db/migrations/00004_create_checked_headers_table.sql
diff --git a/db/migrations/00004_create_contracts_table.sql b/db/migrations/00004_create_contracts_table.sql
deleted file mode 100644
index 3ff43bef..00000000
--- a/db/migrations/00004_create_contracts_table.sql
+++ /dev/null
@@ -1,10 +0,0 @@
--- +goose Up
-CREATE TABLE watched_contracts
-(
- contract_id SERIAL PRIMARY KEY,
- contract_abi json,
- contract_hash VARCHAR(66) UNIQUE
-);
-
--- +goose Down
-DROP TABLE watched_contracts;
diff --git a/db/migrations/00025_create_storage_diffs_table.sql b/db/migrations/00005_create_storage_diffs_table.sql
similarity index 100%
rename from db/migrations/00025_create_storage_diffs_table.sql
rename to db/migrations/00005_create_storage_diffs_table.sql
diff --git a/db/migrations/00006_add_node_fk_to_blocks.sql b/db/migrations/00006_add_node_fk_to_blocks.sql
deleted file mode 100644
index 31743901..00000000
--- a/db/migrations/00006_add_node_fk_to_blocks.sql
+++ /dev/null
@@ -1,11 +0,0 @@
--- +goose Up
-ALTER TABLE eth_blocks
- ADD COLUMN node_id INTEGER NOT NULL,
- ADD CONSTRAINT node_fk
-FOREIGN KEY (node_id)
-REFERENCES nodes (id)
-ON DELETE CASCADE;
-
--- +goose Down
-ALTER TABLE eth_blocks
- DROP COLUMN node_id;
diff --git a/db/migrations/00026_create_queued_storage_diffs_table.sql b/db/migrations/00006_create_queued_storage_diffs_table.sql
similarity index 100%
rename from db/migrations/00026_create_queued_storage_diffs_table.sql
rename to db/migrations/00006_create_queued_storage_diffs_table.sql
diff --git a/db/migrations/00007_create_full_sync_logs_table.sql b/db/migrations/00007_create_full_sync_logs_table.sql
deleted file mode 100644
index 67cc31bf..00000000
--- a/db/migrations/00007_create_full_sync_logs_table.sql
+++ /dev/null
@@ -1,18 +0,0 @@
--- +goose Up
-CREATE TABLE full_sync_logs
-(
- id SERIAL PRIMARY KEY,
- block_number BIGINT,
- address VARCHAR(66),
- tx_hash VARCHAR(66),
- index BIGINT,
- topic0 VARCHAR(66),
- topic1 VARCHAR(66),
- topic2 VARCHAR(66),
- topic3 VARCHAR(66),
- data TEXT
-);
-
-
--- +goose Down
-DROP TABLE full_sync_logs;
diff --git a/db/migrations/00027_create_header_sync_transactions_table.sql b/db/migrations/00007_create_header_sync_transactions_table.sql
similarity index 100%
rename from db/migrations/00027_create_header_sync_transactions_table.sql
rename to db/migrations/00007_create_header_sync_transactions_table.sql
diff --git a/db/migrations/00008_add_node_block_number_unique_constraint_to_blocks.sql b/db/migrations/00008_add_node_block_number_unique_constraint_to_blocks.sql
deleted file mode 100644
index 0d619f44..00000000
--- a/db/migrations/00008_add_node_block_number_unique_constraint_to_blocks.sql
+++ /dev/null
@@ -1,7 +0,0 @@
--- +goose Up
-ALTER TABLE eth_blocks
- ADD CONSTRAINT node_id_block_number_uc UNIQUE (number, node_id);
-
--- +goose Down
-ALTER TABLE eth_blocks
- DROP CONSTRAINT node_id_block_number_uc;
diff --git a/db/migrations/00028_create_header_sync_receipts_table.sql b/db/migrations/00008_create_header_sync_receipts_table.sql
similarity index 100%
rename from db/migrations/00028_create_header_sync_receipts_table.sql
rename to db/migrations/00008_create_header_sync_receipts_table.sql
diff --git a/db/migrations/00009_add_block_id_index_to_full_sync_transactions.sql b/db/migrations/00009_add_block_id_index_to_full_sync_transactions.sql
deleted file mode 100644
index 99b2ca78..00000000
--- a/db/migrations/00009_add_block_id_index_to_full_sync_transactions.sql
+++ /dev/null
@@ -1,5 +0,0 @@
--- +goose Up
-CREATE INDEX block_id_index ON full_sync_transactions (block_id);
-
--- +goose Down
-DROP INDEX block_id_index;
diff --git a/db/migrations/00030_create_header_sync_logs_table.sql b/db/migrations/00009_create_header_sync_logs_table.sql
similarity index 100%
rename from db/migrations/00030_create_header_sync_logs_table.sql
rename to db/migrations/00009_create_header_sync_logs_table.sql
diff --git a/db/migrations/00010_add_node_id_index_to_blocks.sql b/db/migrations/00010_add_node_id_index_to_blocks.sql
deleted file mode 100644
index 8fc35878..00000000
--- a/db/migrations/00010_add_node_id_index_to_blocks.sql
+++ /dev/null
@@ -1,5 +0,0 @@
--- +goose Up
-CREATE INDEX node_id_index ON eth_blocks (node_id);
-
--- +goose Down
-DROP INDEX node_id_index;
diff --git a/db/migrations/00031_create_watched_logs_table.sql b/db/migrations/00010_create_watched_logs_table.sql
similarity index 100%
rename from db/migrations/00031_create_watched_logs_table.sql
rename to db/migrations/00010_create_watched_logs_table.sql
diff --git a/db/migrations/00011_add_tx_to_index_to_full_sync_transactions.sql b/db/migrations/00011_add_tx_to_index_to_full_sync_transactions.sql
deleted file mode 100644
index cf2977d1..00000000
--- a/db/migrations/00011_add_tx_to_index_to_full_sync_transactions.sql
+++ /dev/null
@@ -1,5 +0,0 @@
--- +goose Up
-CREATE INDEX tx_to_index ON full_sync_transactions(tx_to);
-
--- +goose Down
-DROP INDEX tx_to_index;
diff --git a/db/migrations/00032_create_eth_schema.sql b/db/migrations/00011_create_eth_schema.sql
similarity index 100%
rename from db/migrations/00032_create_eth_schema.sql
rename to db/migrations/00011_create_eth_schema.sql
diff --git a/db/migrations/00012_add_tx_from_index_to_full_sync_transactions.sql b/db/migrations/00012_add_tx_from_index_to_full_sync_transactions.sql
deleted file mode 100644
index fa6f0543..00000000
--- a/db/migrations/00012_add_tx_from_index_to_full_sync_transactions.sql
+++ /dev/null
@@ -1,5 +0,0 @@
--- +goose Up
-CREATE INDEX tx_from_index ON full_sync_transactions(tx_from);
-
--- +goose Down
-DROP INDEX tx_from_index;
diff --git a/db/migrations/00033_create_eth_header_cids_table.sql b/db/migrations/00012_create_eth_header_cids_table.sql
similarity index 82%
rename from db/migrations/00033_create_eth_header_cids_table.sql
rename to db/migrations/00012_create_eth_header_cids_table.sql
index cf7f42f9..5e646e0f 100644
--- a/db/migrations/00033_create_eth_header_cids_table.sql
+++ b/db/migrations/00012_create_eth_header_cids_table.sql
@@ -6,6 +6,7 @@ CREATE TABLE eth.header_cids (
parent_hash VARCHAR(66) NOT NULL,
cid TEXT NOT NULL,
td BIGINT,
+ node_id INTEGER NOT NULL REFERENCES nodes (id) ON DELETE CASCADE,
UNIQUE (block_number, block_hash)
);
diff --git a/db/migrations/00034_create_eth_uncle_cids_table.sql b/db/migrations/00013_create_eth_uncle_cids_table.sql
similarity index 100%
rename from db/migrations/00034_create_eth_uncle_cids_table.sql
rename to db/migrations/00013_create_eth_uncle_cids_table.sql
diff --git a/db/migrations/00035_create_eth_transaction_cids_table.sql b/db/migrations/00014_create_eth_transaction_cids_table.sql
similarity index 100%
rename from db/migrations/00035_create_eth_transaction_cids_table.sql
rename to db/migrations/00014_create_eth_transaction_cids_table.sql
diff --git a/db/migrations/00014_create_receipts_table.sql b/db/migrations/00014_create_receipts_table.sql
deleted file mode 100644
index b39e4492..00000000
--- a/db/migrations/00014_create_receipts_table.sql
+++ /dev/null
@@ -1,16 +0,0 @@
--- +goose Up
-CREATE TABLE full_sync_receipts
-(
- id SERIAL PRIMARY KEY,
- transaction_id INTEGER NOT NULL REFERENCES full_sync_transactions (id) ON DELETE CASCADE,
- contract_address_id INTEGER NOT NULL REFERENCES addresses (id) ON DELETE CASCADE,
- cumulative_gas_used NUMERIC,
- gas_used NUMERIC,
- state_root VARCHAR(66),
- status INTEGER,
- tx_hash VARCHAR(66)
-);
-
-
--- +goose Down
-DROP TABLE full_sync_receipts;
diff --git a/db/migrations/00015_add_transaction_id_index_to_receipts.sql b/db/migrations/00015_add_transaction_id_index_to_receipts.sql
deleted file mode 100644
index 04ff38b7..00000000
--- a/db/migrations/00015_add_transaction_id_index_to_receipts.sql
+++ /dev/null
@@ -1,5 +0,0 @@
--- +goose Up
-CREATE INDEX transaction_id_index ON full_sync_receipts (transaction_id);
-
--- +goose Down
-DROP INDEX transaction_id_index;
diff --git a/db/migrations/00036_create_eth_receipt_cids_table.sql b/db/migrations/00015_create_eth_receipt_cids_table.sql
similarity index 100%
rename from db/migrations/00036_create_eth_receipt_cids_table.sql
rename to db/migrations/00015_create_eth_receipt_cids_table.sql
diff --git a/db/migrations/00016_add_receipts_fk_to_logs.sql b/db/migrations/00016_add_receipts_fk_to_logs.sql
deleted file mode 100644
index 760cd504..00000000
--- a/db/migrations/00016_add_receipts_fk_to_logs.sql
+++ /dev/null
@@ -1,17 +0,0 @@
--- +goose Up
-ALTER TABLE full_sync_logs
- ADD COLUMN receipt_id INT;
-
-ALTER TABLE full_sync_logs
- ADD CONSTRAINT receipts_fk
- FOREIGN KEY (receipt_id)
- REFERENCES full_sync_receipts (id)
- ON DELETE CASCADE;
-
-
--- +goose Down
-ALTER TABLE full_sync_logs
- DROP CONSTRAINT receipts_fk;
-
-ALTER TABLE full_sync_logs
- DROP COLUMN receipt_id;
diff --git a/db/migrations/00037_create_eth_state_cids_table.sql b/db/migrations/00016_create_eth_state_cids_table.sql
similarity index 100%
rename from db/migrations/00037_create_eth_state_cids_table.sql
rename to db/migrations/00016_create_eth_state_cids_table.sql
diff --git a/db/migrations/00038_create_eth_storage_cids_table.sql b/db/migrations/00017_create_eth_storage_cids_table.sql
similarity index 100%
rename from db/migrations/00038_create_eth_storage_cids_table.sql
rename to db/migrations/00017_create_eth_storage_cids_table.sql
diff --git a/db/migrations/00017_create_log_filters.sql b/db/migrations/00017_create_log_filters.sql
deleted file mode 100644
index 0367f5e5..00000000
--- a/db/migrations/00017_create_log_filters.sql
+++ /dev/null
@@ -1,16 +0,0 @@
--- +goose Up
-CREATE TABLE log_filters (
- id SERIAL,
- name VARCHAR NOT NULL CHECK (name <> ''),
- from_block BIGINT CHECK (from_block >= 0),
- to_block BIGINT CHECK (from_block >= 0),
- address VARCHAR(66),
- topic0 VARCHAR(66),
- topic1 VARCHAR(66),
- topic2 VARCHAR(66),
- topic3 VARCHAR(66),
- CONSTRAINT name_uc UNIQUE (name)
-);
-
--- +goose Down
-DROP TABLE log_filters;
diff --git a/db/migrations/00039_create_ipfs_blocks_table.sql b/db/migrations/00018_create_ipfs_blocks_table.sql
similarity index 100%
rename from db/migrations/00039_create_ipfs_blocks_table.sql
rename to db/migrations/00018_create_ipfs_blocks_table.sql
diff --git a/db/migrations/00018_create_watched_event_logs.sql b/db/migrations/00018_create_watched_event_logs.sql
deleted file mode 100644
index 697c788d..00000000
--- a/db/migrations/00018_create_watched_event_logs.sql
+++ /dev/null
@@ -1,32 +0,0 @@
--- +goose Up
-CREATE VIEW block_stats AS
-SELECT max(block_number) AS max_block,
- min(block_number) AS min_block
-FROM full_sync_logs;
-
-CREATE VIEW watched_event_logs AS
-SELECT log_filters.name,
- full_sync_logs.id,
- block_number,
- full_sync_logs.address,
- tx_hash,
- index,
- full_sync_logs.topic0,
- full_sync_logs.topic1,
- full_sync_logs.topic2,
- full_sync_logs.topic3,
- data,
- receipt_id
-FROM log_filters
- CROSS JOIN block_stats
- JOIN full_sync_logs ON full_sync_logs.address = log_filters.address
- AND full_sync_logs.block_number >= coalesce(log_filters.from_block, block_stats.min_block)
- AND full_sync_logs.block_number <= coalesce(log_filters.to_block, block_stats.max_block)
-WHERE (log_filters.topic0 = full_sync_logs.topic0 OR log_filters.topic0 ISNULL)
- AND (log_filters.topic1 = full_sync_logs.topic1 OR log_filters.topic1 ISNULL)
- AND (log_filters.topic2 = full_sync_logs.topic2 OR log_filters.topic2 ISNULL)
- AND (log_filters.topic3 = full_sync_logs.topic3 OR log_filters.topic3 ISNULL);
-
--- +goose Down
-DROP VIEW watched_event_logs;
-DROP VIEW block_stats;
diff --git a/db/migrations/00040_create_btc_schema.sql b/db/migrations/00019_create_btc_schema.sql
similarity index 100%
rename from db/migrations/00040_create_btc_schema.sql
rename to db/migrations/00019_create_btc_schema.sql
diff --git a/db/migrations/00019_update_log_filters_to_block_constraint.sql b/db/migrations/00019_update_log_filters_to_block_constraint.sql
deleted file mode 100644
index 512a44db..00000000
--- a/db/migrations/00019_update_log_filters_to_block_constraint.sql
+++ /dev/null
@@ -1,14 +0,0 @@
--- +goose Up
-ALTER TABLE log_filters
- DROP CONSTRAINT log_filters_from_block_check1;
-
-ALTER TABLE log_filters
- ADD CONSTRAINT log_filters_to_block_check CHECK (to_block >= 0);
-
-
--- +goose Down
-ALTER TABLE log_filters
- DROP CONSTRAINT log_filters_to_block_check;
-
-ALTER TABLE log_filters
- ADD CONSTRAINT log_filters_from_block_check1 CHECK (to_block >= 0);
diff --git a/db/migrations/00041_create_btc_header_cids_table.sql b/db/migrations/00020_create_btc_header_cids_table.sql
similarity index 82%
rename from db/migrations/00041_create_btc_header_cids_table.sql
rename to db/migrations/00020_create_btc_header_cids_table.sql
index 62ffb14b..9d2c1553 100644
--- a/db/migrations/00041_create_btc_header_cids_table.sql
+++ b/db/migrations/00020_create_btc_header_cids_table.sql
@@ -7,6 +7,7 @@ CREATE TABLE btc.header_cids (
cid TEXT NOT NULL,
timestamp NUMERIC NOT NULL,
bits BIGINT NOT NULL,
+ node_id INTEGER NOT NULL REFERENCES nodes (id) ON DELETE CASCADE,
UNIQUE (block_number, block_hash)
);
diff --git a/db/migrations/00020_rename_node_table.sql b/db/migrations/00020_rename_node_table.sql
deleted file mode 100644
index 7ab32eac..00000000
--- a/db/migrations/00020_rename_node_table.sql
+++ /dev/null
@@ -1,43 +0,0 @@
--- +goose Up
-ALTER TABLE public.nodes RENAME TO eth_nodes;
-
-ALTER TABLE public.eth_nodes RENAME COLUMN node_id TO eth_node_id;
-
-ALTER TABLE public.eth_nodes DROP CONSTRAINT node_uc;
-ALTER TABLE public.eth_nodes
- ADD CONSTRAINT eth_node_uc UNIQUE (genesis_block, network_id, eth_node_id);
-
-ALTER TABLE public.eth_blocks RENAME COLUMN node_id TO eth_node_id;
-
-ALTER TABLE public.eth_blocks DROP CONSTRAINT node_id_block_number_uc;
-ALTER TABLE public.eth_blocks
- ADD CONSTRAINT eth_node_id_block_number_uc UNIQUE (number, eth_node_id);
-
-ALTER TABLE public.eth_blocks DROP CONSTRAINT node_fk;
-ALTER TABLE public.eth_blocks
- ADD CONSTRAINT node_fk
-FOREIGN KEY (eth_node_id) REFERENCES eth_nodes (id) ON DELETE CASCADE;
-
-
--- +goose Down
-ALTER TABLE public.eth_nodes
- RENAME TO nodes;
-
-ALTER TABLE public.nodes
- RENAME COLUMN eth_node_id TO node_id;
-
-ALTER TABLE public.nodes
- DROP CONSTRAINT eth_node_uc;
-ALTER TABLE public.nodes
- ADD CONSTRAINT node_uc UNIQUE (genesis_block, network_id, node_id);
-
-ALTER TABLE public.eth_blocks RENAME COLUMN eth_node_id TO node_id;
-
-ALTER TABLE public.eth_blocks DROP CONSTRAINT eth_node_id_block_number_uc;
-ALTER TABLE public.eth_blocks
- ADD CONSTRAINT node_id_block_number_uc UNIQUE (number, node_id);
-
-ALTER TABLE public.eth_blocks DROP CONSTRAINT node_fk;
-ALTER TABLE public.eth_blocks
- ADD CONSTRAINT node_fk
-FOREIGN KEY (node_id) REFERENCES nodes (id) ON DELETE CASCADE;
diff --git a/db/migrations/00021_associate_receipts_with_blocks.sql b/db/migrations/00021_associate_receipts_with_blocks.sql
deleted file mode 100644
index 8cb6228c..00000000
--- a/db/migrations/00021_associate_receipts_with_blocks.sql
+++ /dev/null
@@ -1,44 +0,0 @@
--- +goose Up
-ALTER TABLE full_sync_receipts
- ADD COLUMN block_id INT;
-
-UPDATE full_sync_receipts
- SET block_id = (
- SELECT block_id FROM full_sync_transactions WHERE full_sync_transactions.id = full_sync_receipts.transaction_id
- );
-
-ALTER TABLE full_sync_receipts
- ALTER COLUMN block_id SET NOT NULL;
-
-ALTER TABLE full_sync_receipts
- ADD CONSTRAINT eth_blocks_fk
-FOREIGN KEY (block_id)
-REFERENCES eth_blocks (id)
-ON DELETE CASCADE;
-
-ALTER TABLE full_sync_receipts
- DROP COLUMN transaction_id;
-
-
--- +goose Down
-ALTER TABLE full_sync_receipts
- ADD COLUMN transaction_id INT;
-
-CREATE INDEX transaction_id_index ON full_sync_receipts (transaction_id);
-
-UPDATE full_sync_receipts
- SET transaction_id = (
- SELECT id FROM full_sync_transactions WHERE full_sync_transactions.hash = full_sync_receipts.tx_hash
- );
-
-ALTER TABLE full_sync_receipts
- ALTER COLUMN transaction_id SET NOT NULL;
-
-ALTER TABLE full_sync_receipts
- ADD CONSTRAINT transaction_fk
-FOREIGN KEY (transaction_id)
-REFERENCES full_sync_transactions (id)
-ON DELETE CASCADE;
-
-ALTER TABLE full_sync_receipts
- DROP COLUMN block_id;
diff --git a/db/migrations/00042_create_btc_transaction_cids_table.sql b/db/migrations/00021_create_btc_transaction_cids_table.sql
similarity index 100%
rename from db/migrations/00042_create_btc_transaction_cids_table.sql
rename to db/migrations/00021_create_btc_transaction_cids_table.sql
diff --git a/db/migrations/00022_add_eth_node_fingerprint_to_blocks.sql b/db/migrations/00022_add_eth_node_fingerprint_to_blocks.sql
deleted file mode 100644
index 268a4ccc..00000000
--- a/db/migrations/00022_add_eth_node_fingerprint_to_blocks.sql
+++ /dev/null
@@ -1,16 +0,0 @@
--- +goose Up
-ALTER TABLE eth_blocks
- ADD COLUMN eth_node_fingerprint VARCHAR(128);
-
-UPDATE eth_blocks
- SET eth_node_fingerprint = (
- SELECT eth_node_id FROM eth_nodes WHERE eth_nodes.id = eth_blocks.eth_node_id
- );
-
-ALTER TABLE eth_blocks
- ALTER COLUMN eth_node_fingerprint SET NOT NULL;
-
-
--- +goose Down
-ALTER TABLE eth_blocks
- DROP COLUMN eth_node_fingerprint;
diff --git a/db/migrations/00043_create_btc_tx_outputs_table.sql b/db/migrations/00022_create_btc_tx_outputs_table.sql
similarity index 100%
rename from db/migrations/00043_create_btc_tx_outputs_table.sql
rename to db/migrations/00022_create_btc_tx_outputs_table.sql
diff --git a/db/migrations/00044_create_btc_tx_inputs_table.sql b/db/migrations/00023_create_btc_tx_inputs_table.sql
similarity index 100%
rename from db/migrations/00044_create_btc_tx_inputs_table.sql
rename to db/migrations/00023_create_btc_tx_inputs_table.sql
diff --git a/db/migrations/00029_create_uncles_table.sql b/db/migrations/00029_create_uncles_table.sql
deleted file mode 100644
index ae8fd761..00000000
--- a/db/migrations/00029_create_uncles_table.sql
+++ /dev/null
@@ -1,16 +0,0 @@
--- +goose Up
-CREATE TABLE public.uncles (
- id SERIAL PRIMARY KEY,
- hash VARCHAR(66) NOT NULL,
- block_id INTEGER NOT NULL REFERENCES eth_blocks (id) ON DELETE CASCADE,
- reward NUMERIC NOT NULL,
- miner VARCHAR(42) NOT NULL,
- raw JSONB,
- block_timestamp NUMERIC,
- eth_node_id INTEGER NOT NULL REFERENCES eth_nodes (id) ON DELETE CASCADE,
- eth_node_fingerprint VARCHAR(128),
- UNIQUE (block_id, hash)
-);
-
--- +goose Down
-DROP TABLE public.uncles;
diff --git a/db/schema.sql b/db/schema.sql
index 6d067c4e..4dc99692 100644
--- a/db/schema.sql
+++ b/db/schema.sql
@@ -43,7 +43,8 @@ CREATE TABLE btc.header_cids (
parent_hash character varying(66) NOT NULL,
cid text NOT NULL,
"timestamp" numeric NOT NULL,
- bits bigint NOT NULL
+ bits bigint NOT NULL,
+ node_id integer NOT NULL
);
@@ -182,7 +183,8 @@ CREATE TABLE eth.header_cids (
block_hash character varying(66) NOT NULL,
parent_hash character varying(66) NOT NULL,
cid text NOT NULL,
- td bigint
+ td bigint,
+ node_id integer NOT NULL
);
@@ -407,35 +409,6 @@ CREATE SEQUENCE public.addresses_id_seq
ALTER SEQUENCE public.addresses_id_seq OWNED BY public.addresses.id;
---
--- Name: full_sync_logs; Type: TABLE; Schema: public; Owner: -
---
-
-CREATE TABLE public.full_sync_logs (
- id integer NOT NULL,
- block_number bigint,
- address character varying(66),
- tx_hash character varying(66),
- index bigint,
- topic0 character varying(66),
- topic1 character varying(66),
- topic2 character varying(66),
- topic3 character varying(66),
- data text,
- receipt_id integer
-);
-
-
---
--- Name: block_stats; Type: VIEW; Schema: public; Owner: -
---
-
-CREATE VIEW public.block_stats AS
- SELECT max(full_sync_logs.block_number) AS max_block,
- min(full_sync_logs.block_number) AS min_block
- FROM public.full_sync_logs;
-
-
--
-- Name: blocks; Type: TABLE; Schema: public; Owner: -
--
@@ -476,161 +449,6 @@ CREATE SEQUENCE public.checked_headers_id_seq
ALTER SEQUENCE public.checked_headers_id_seq OWNED BY public.checked_headers.id;
---
--- Name: eth_blocks; Type: TABLE; Schema: public; Owner: -
---
-
-CREATE TABLE public.eth_blocks (
- id integer NOT NULL,
- difficulty bigint,
- extra_data character varying,
- gas_limit bigint,
- gas_used bigint,
- hash character varying(66),
- miner character varying(42),
- nonce character varying(20),
- number bigint,
- parent_hash character varying(66),
- reward numeric,
- uncles_reward numeric,
- size character varying,
- "time" bigint,
- is_final boolean,
- uncle_hash character varying(66),
- eth_node_id integer NOT NULL,
- eth_node_fingerprint character varying(128) NOT NULL
-);
-
-
---
--- Name: eth_blocks_id_seq; Type: SEQUENCE; Schema: public; Owner: -
---
-
-CREATE SEQUENCE public.eth_blocks_id_seq
- AS integer
- START WITH 1
- INCREMENT BY 1
- NO MINVALUE
- NO MAXVALUE
- CACHE 1;
-
-
---
--- Name: eth_blocks_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
---
-
-ALTER SEQUENCE public.eth_blocks_id_seq OWNED BY public.eth_blocks.id;
-
-
---
--- Name: eth_nodes; Type: TABLE; Schema: public; Owner: -
---
-
-CREATE TABLE public.eth_nodes (
- id integer NOT NULL,
- client_name character varying,
- genesis_block character varying(66),
- network_id numeric,
- eth_node_id character varying(128)
-);
-
-
---
--- Name: full_sync_logs_id_seq; Type: SEQUENCE; Schema: public; Owner: -
---
-
-CREATE SEQUENCE public.full_sync_logs_id_seq
- AS integer
- START WITH 1
- INCREMENT BY 1
- NO MINVALUE
- NO MAXVALUE
- CACHE 1;
-
-
---
--- Name: full_sync_logs_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
---
-
-ALTER SEQUENCE public.full_sync_logs_id_seq OWNED BY public.full_sync_logs.id;
-
-
---
--- Name: full_sync_receipts; Type: TABLE; Schema: public; Owner: -
---
-
-CREATE TABLE public.full_sync_receipts (
- id integer NOT NULL,
- contract_address_id integer NOT NULL,
- cumulative_gas_used numeric,
- gas_used numeric,
- state_root character varying(66),
- status integer,
- tx_hash character varying(66),
- block_id integer NOT NULL
-);
-
-
---
--- Name: full_sync_receipts_id_seq; Type: SEQUENCE; Schema: public; Owner: -
---
-
-CREATE SEQUENCE public.full_sync_receipts_id_seq
- AS integer
- START WITH 1
- INCREMENT BY 1
- NO MINVALUE
- NO MAXVALUE
- CACHE 1;
-
-
---
--- Name: full_sync_receipts_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
---
-
-ALTER SEQUENCE public.full_sync_receipts_id_seq OWNED BY public.full_sync_receipts.id;
-
-
---
--- Name: full_sync_transactions; Type: TABLE; Schema: public; Owner: -
---
-
-CREATE TABLE public.full_sync_transactions (
- id integer NOT NULL,
- block_id integer NOT NULL,
- gas_limit numeric,
- gas_price numeric,
- hash character varying(66),
- input_data bytea,
- nonce numeric,
- raw bytea,
- tx_from character varying(66),
- tx_index integer,
- tx_to character varying(66),
- value numeric
-);
-
-
---
--- Name: full_sync_transactions_id_seq; Type: SEQUENCE; Schema: public; Owner: -
---
-
-CREATE SEQUENCE public.full_sync_transactions_id_seq
- AS integer
- START WITH 1
- INCREMENT BY 1
- NO MINVALUE
- NO MAXVALUE
- CACHE 1;
-
-
---
--- Name: full_sync_transactions_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
---
-
-ALTER SEQUENCE public.full_sync_transactions_id_seq OWNED BY public.full_sync_transactions.id;
-
-
--
-- Name: goose_db_version; Type: TABLE; Schema: public; Owner: -
--
@@ -792,7 +610,7 @@ CREATE TABLE public.headers (
raw jsonb,
block_timestamp numeric,
check_count integer DEFAULT 0 NOT NULL,
- eth_node_id integer NOT NULL,
+ node_id integer NOT NULL,
eth_node_fingerprint character varying(128)
);
@@ -818,45 +636,18 @@ ALTER SEQUENCE public.headers_id_seq OWNED BY public.headers.id;
--
--- Name: log_filters; Type: TABLE; Schema: public; Owner: -
+-- Name: nodes; Type: TABLE; Schema: public; Owner: -
--
-CREATE TABLE public.log_filters (
+CREATE TABLE public.nodes (
id integer NOT NULL,
- name character varying NOT NULL,
- from_block bigint,
- to_block bigint,
- address character varying(66),
- topic0 character varying(66),
- topic1 character varying(66),
- topic2 character varying(66),
- topic3 character varying(66),
- CONSTRAINT log_filters_from_block_check CHECK ((from_block >= 0)),
- CONSTRAINT log_filters_name_check CHECK (((name)::text <> ''::text)),
- CONSTRAINT log_filters_to_block_check CHECK ((to_block >= 0))
+ client_name character varying,
+ genesis_block character varying(66),
+ network_id character varying,
+ node_id character varying(128)
);
---
--- Name: log_filters_id_seq; Type: SEQUENCE; Schema: public; Owner: -
---
-
-CREATE SEQUENCE public.log_filters_id_seq
- AS integer
- START WITH 1
- INCREMENT BY 1
- NO MINVALUE
- NO MAXVALUE
- CACHE 1;
-
-
---
--- Name: log_filters_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
---
-
-ALTER SEQUENCE public.log_filters_id_seq OWNED BY public.log_filters.id;
-
-
--
-- Name: nodes_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
@@ -874,7 +665,7 @@ CREATE SEQUENCE public.nodes_id_seq
-- Name: nodes_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
-ALTER SEQUENCE public.nodes_id_seq OWNED BY public.eth_nodes.id;
+ALTER SEQUENCE public.nodes_id_seq OWNED BY public.nodes.id;
--
@@ -941,97 +732,6 @@ CREATE SEQUENCE public.storage_diff_id_seq
ALTER SEQUENCE public.storage_diff_id_seq OWNED BY public.storage_diff.id;
---
--- Name: uncles; Type: TABLE; Schema: public; Owner: -
---
-
-CREATE TABLE public.uncles (
- id integer NOT NULL,
- hash character varying(66) NOT NULL,
- block_id integer NOT NULL,
- reward numeric NOT NULL,
- miner character varying(42) NOT NULL,
- raw jsonb,
- block_timestamp numeric,
- eth_node_id integer NOT NULL,
- eth_node_fingerprint character varying(128)
-);
-
-
---
--- Name: uncles_id_seq; Type: SEQUENCE; Schema: public; Owner: -
---
-
-CREATE SEQUENCE public.uncles_id_seq
- AS integer
- START WITH 1
- INCREMENT BY 1
- NO MINVALUE
- NO MAXVALUE
- CACHE 1;
-
-
---
--- Name: uncles_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
---
-
-ALTER SEQUENCE public.uncles_id_seq OWNED BY public.uncles.id;
-
-
---
--- Name: watched_contracts; Type: TABLE; Schema: public; Owner: -
---
-
-CREATE TABLE public.watched_contracts (
- contract_id integer NOT NULL,
- contract_abi json,
- contract_hash character varying(66)
-);
-
-
---
--- Name: watched_contracts_contract_id_seq; Type: SEQUENCE; Schema: public; Owner: -
---
-
-CREATE SEQUENCE public.watched_contracts_contract_id_seq
- AS integer
- START WITH 1
- INCREMENT BY 1
- NO MINVALUE
- NO MAXVALUE
- CACHE 1;
-
-
---
--- Name: watched_contracts_contract_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
---
-
-ALTER SEQUENCE public.watched_contracts_contract_id_seq OWNED BY public.watched_contracts.contract_id;
-
-
---
--- Name: watched_event_logs; Type: VIEW; Schema: public; Owner: -
---
-
-CREATE VIEW public.watched_event_logs AS
- SELECT log_filters.name,
- full_sync_logs.id,
- full_sync_logs.block_number,
- full_sync_logs.address,
- full_sync_logs.tx_hash,
- full_sync_logs.index,
- full_sync_logs.topic0,
- full_sync_logs.topic1,
- full_sync_logs.topic2,
- full_sync_logs.topic3,
- full_sync_logs.data,
- full_sync_logs.receipt_id
- FROM ((public.log_filters
- CROSS JOIN public.block_stats)
- JOIN public.full_sync_logs ON ((((full_sync_logs.address)::text = (log_filters.address)::text) AND (full_sync_logs.block_number >= COALESCE(log_filters.from_block, block_stats.min_block)) AND (full_sync_logs.block_number <= COALESCE(log_filters.to_block, block_stats.max_block)))))
- WHERE ((((log_filters.topic0)::text = (full_sync_logs.topic0)::text) OR (log_filters.topic0 IS NULL)) AND (((log_filters.topic1)::text = (full_sync_logs.topic1)::text) OR (log_filters.topic1 IS NULL)) AND (((log_filters.topic2)::text = (full_sync_logs.topic2)::text) OR (log_filters.topic2 IS NULL)) AND (((log_filters.topic3)::text = (full_sync_logs.topic3)::text) OR (log_filters.topic3 IS NULL)));
-
-
--
-- Name: watched_logs; Type: TABLE; Schema: public; Owner: -
--
@@ -1147,41 +847,6 @@ ALTER TABLE ONLY public.addresses ALTER COLUMN id SET DEFAULT nextval('public.ad
ALTER TABLE ONLY public.checked_headers ALTER COLUMN id SET DEFAULT nextval('public.checked_headers_id_seq'::regclass);
---
--- Name: eth_blocks id; Type: DEFAULT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.eth_blocks ALTER COLUMN id SET DEFAULT nextval('public.eth_blocks_id_seq'::regclass);
-
-
---
--- Name: eth_nodes id; Type: DEFAULT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.eth_nodes ALTER COLUMN id SET DEFAULT nextval('public.nodes_id_seq'::regclass);
-
-
---
--- Name: full_sync_logs id; Type: DEFAULT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.full_sync_logs ALTER COLUMN id SET DEFAULT nextval('public.full_sync_logs_id_seq'::regclass);
-
-
---
--- Name: full_sync_receipts id; Type: DEFAULT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.full_sync_receipts ALTER COLUMN id SET DEFAULT nextval('public.full_sync_receipts_id_seq'::regclass);
-
-
---
--- Name: full_sync_transactions id; Type: DEFAULT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.full_sync_transactions ALTER COLUMN id SET DEFAULT nextval('public.full_sync_transactions_id_seq'::regclass);
-
-
--
-- Name: goose_db_version id; Type: DEFAULT; Schema: public; Owner: -
--
@@ -1218,10 +883,10 @@ ALTER TABLE ONLY public.headers ALTER COLUMN id SET DEFAULT nextval('public.head
--
--- Name: log_filters id; Type: DEFAULT; Schema: public; Owner: -
+-- Name: nodes id; Type: DEFAULT; Schema: public; Owner: -
--
-ALTER TABLE ONLY public.log_filters ALTER COLUMN id SET DEFAULT nextval('public.log_filters_id_seq'::regclass);
+ALTER TABLE ONLY public.nodes ALTER COLUMN id SET DEFAULT nextval('public.nodes_id_seq'::regclass);
--
@@ -1238,20 +903,6 @@ ALTER TABLE ONLY public.queued_storage ALTER COLUMN id SET DEFAULT nextval('publ
ALTER TABLE ONLY public.storage_diff ALTER COLUMN id SET DEFAULT nextval('public.storage_diff_id_seq'::regclass);
---
--- Name: uncles id; Type: DEFAULT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.uncles ALTER COLUMN id SET DEFAULT nextval('public.uncles_id_seq'::regclass);
-
-
---
--- Name: watched_contracts contract_id; Type: DEFAULT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.watched_contracts ALTER COLUMN contract_id SET DEFAULT nextval('public.watched_contracts_contract_id_seq'::regclass);
-
-
--
-- Name: watched_logs id; Type: DEFAULT; Schema: public; Owner: -
--
@@ -1451,54 +1102,6 @@ ALTER TABLE ONLY public.checked_headers
ADD CONSTRAINT checked_headers_pkey PRIMARY KEY (id);
---
--- Name: eth_blocks eth_blocks_pkey; Type: CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.eth_blocks
- ADD CONSTRAINT eth_blocks_pkey PRIMARY KEY (id);
-
-
---
--- Name: eth_blocks eth_node_id_block_number_uc; Type: CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.eth_blocks
- ADD CONSTRAINT eth_node_id_block_number_uc UNIQUE (number, eth_node_id);
-
-
---
--- Name: eth_nodes eth_node_uc; Type: CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.eth_nodes
- ADD CONSTRAINT eth_node_uc UNIQUE (genesis_block, network_id, eth_node_id);
-
-
---
--- Name: full_sync_logs full_sync_logs_pkey; Type: CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.full_sync_logs
- ADD CONSTRAINT full_sync_logs_pkey PRIMARY KEY (id);
-
-
---
--- Name: full_sync_receipts full_sync_receipts_pkey; Type: CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.full_sync_receipts
- ADD CONSTRAINT full_sync_receipts_pkey PRIMARY KEY (id);
-
-
---
--- Name: full_sync_transactions full_sync_transactions_pkey; Type: CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.full_sync_transactions
- ADD CONSTRAINT full_sync_transactions_pkey PRIMARY KEY (id);
-
-
--
-- Name: goose_db_version goose_db_version_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
@@ -1572,18 +1175,18 @@ ALTER TABLE ONLY public.headers
--
--- Name: log_filters name_uc; Type: CONSTRAINT; Schema: public; Owner: -
+-- Name: nodes node_uc; Type: CONSTRAINT; Schema: public; Owner: -
--
-ALTER TABLE ONLY public.log_filters
- ADD CONSTRAINT name_uc UNIQUE (name);
+ALTER TABLE ONLY public.nodes
+ ADD CONSTRAINT node_uc UNIQUE (genesis_block, network_id, node_id);
--
--- Name: eth_nodes nodes_pkey; Type: CONSTRAINT; Schema: public; Owner: -
+-- Name: nodes nodes_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
-ALTER TABLE ONLY public.eth_nodes
+ALTER TABLE ONLY public.nodes
ADD CONSTRAINT nodes_pkey PRIMARY KEY (id);
@@ -1619,38 +1222,6 @@ ALTER TABLE ONLY public.storage_diff
ADD CONSTRAINT storage_diff_pkey PRIMARY KEY (id);
---
--- Name: uncles uncles_block_id_hash_key; Type: CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.uncles
- ADD CONSTRAINT uncles_block_id_hash_key UNIQUE (block_id, hash);
-
-
---
--- Name: uncles uncles_pkey; Type: CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.uncles
- ADD CONSTRAINT uncles_pkey PRIMARY KEY (id);
-
-
---
--- Name: watched_contracts watched_contracts_contract_hash_key; Type: CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.watched_contracts
- ADD CONSTRAINT watched_contracts_contract_hash_key UNIQUE (contract_hash);
-
-
---
--- Name: watched_contracts watched_contracts_pkey; Type: CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.watched_contracts
- ADD CONSTRAINT watched_contracts_pkey PRIMARY KEY (contract_id);
-
-
--
-- Name: watched_logs watched_logs_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
@@ -1659,13 +1230,6 @@ ALTER TABLE ONLY public.watched_logs
ADD CONSTRAINT watched_logs_pkey PRIMARY KEY (id);
---
--- Name: block_id_index; Type: INDEX; Schema: public; Owner: -
---
-
-CREATE INDEX block_id_index ON public.full_sync_transactions USING btree (block_id);
-
-
--
-- Name: header_sync_receipts_header; Type: INDEX; Schema: public; Owner: -
--
@@ -1709,31 +1273,11 @@ CREATE INDEX headers_block_timestamp ON public.headers USING btree (block_timest
--
--- Name: node_id_index; Type: INDEX; Schema: public; Owner: -
+-- Name: header_cids header_cids_node_id_fkey; Type: FK CONSTRAINT; Schema: btc; Owner: -
--
-CREATE INDEX node_id_index ON public.eth_blocks USING btree (eth_node_id);
-
-
---
--- Name: number_index; Type: INDEX; Schema: public; Owner: -
---
-
-CREATE INDEX number_index ON public.eth_blocks USING btree (number);
-
-
---
--- Name: tx_from_index; Type: INDEX; Schema: public; Owner: -
---
-
-CREATE INDEX tx_from_index ON public.full_sync_transactions USING btree (tx_from);
-
-
---
--- Name: tx_to_index; Type: INDEX; Schema: public; Owner: -
---
-
-CREATE INDEX tx_to_index ON public.full_sync_transactions USING btree (tx_to);
+ALTER TABLE ONLY btc.header_cids
+ ADD CONSTRAINT header_cids_node_id_fkey FOREIGN KEY (node_id) REFERENCES public.nodes(id) ON DELETE CASCADE;
--
@@ -1768,6 +1312,14 @@ ALTER TABLE ONLY btc.tx_outputs
ADD CONSTRAINT tx_outputs_tx_id_fkey FOREIGN KEY (tx_id) REFERENCES btc.transaction_cids(id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED;
+--
+-- Name: header_cids header_cids_node_id_fkey; Type: FK CONSTRAINT; Schema: eth; Owner: -
+--
+
+ALTER TABLE ONLY eth.header_cids
+ ADD CONSTRAINT header_cids_node_id_fkey FOREIGN KEY (node_id) REFERENCES public.nodes(id) ON DELETE CASCADE;
+
+
--
-- Name: receipt_cids receipt_cids_tx_id_fkey; Type: FK CONSTRAINT; Schema: eth; Owner: -
--
@@ -1816,30 +1368,6 @@ ALTER TABLE ONLY public.checked_headers
ADD CONSTRAINT checked_headers_header_id_fkey FOREIGN KEY (header_id) REFERENCES public.headers(id) ON DELETE CASCADE;
---
--- Name: full_sync_receipts eth_blocks_fk; Type: FK CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.full_sync_receipts
- ADD CONSTRAINT eth_blocks_fk FOREIGN KEY (block_id) REFERENCES public.eth_blocks(id) ON DELETE CASCADE;
-
-
---
--- Name: full_sync_receipts full_sync_receipts_contract_address_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.full_sync_receipts
- ADD CONSTRAINT full_sync_receipts_contract_address_id_fkey FOREIGN KEY (contract_address_id) REFERENCES public.addresses(id) ON DELETE CASCADE;
-
-
---
--- Name: full_sync_transactions full_sync_transactions_block_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.full_sync_transactions
- ADD CONSTRAINT full_sync_transactions_block_id_fkey FOREIGN KEY (block_id) REFERENCES public.eth_blocks(id) ON DELETE CASCADE;
-
-
--
-- Name: header_sync_logs header_sync_logs_address_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
@@ -1889,19 +1417,11 @@ ALTER TABLE ONLY public.header_sync_transactions
--
--- Name: headers headers_eth_node_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
+-- Name: headers headers_node_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.headers
- ADD CONSTRAINT headers_eth_node_id_fkey FOREIGN KEY (eth_node_id) REFERENCES public.eth_nodes(id) ON DELETE CASCADE;
-
-
---
--- Name: eth_blocks node_fk; Type: FK CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.eth_blocks
- ADD CONSTRAINT node_fk FOREIGN KEY (eth_node_id) REFERENCES public.eth_nodes(id) ON DELETE CASCADE;
+ ADD CONSTRAINT headers_node_id_fkey FOREIGN KEY (node_id) REFERENCES public.nodes(id) ON DELETE CASCADE;
--
@@ -1912,30 +1432,6 @@ ALTER TABLE ONLY public.queued_storage
ADD CONSTRAINT queued_storage_diff_id_fkey FOREIGN KEY (diff_id) REFERENCES public.storage_diff(id);
---
--- Name: full_sync_logs receipts_fk; Type: FK CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.full_sync_logs
- ADD CONSTRAINT receipts_fk FOREIGN KEY (receipt_id) REFERENCES public.full_sync_receipts(id) ON DELETE CASCADE;
-
-
---
--- Name: uncles uncles_block_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.uncles
- ADD CONSTRAINT uncles_block_id_fkey FOREIGN KEY (block_id) REFERENCES public.eth_blocks(id) ON DELETE CASCADE;
-
-
---
--- Name: uncles uncles_eth_node_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.uncles
- ADD CONSTRAINT uncles_eth_node_id_fkey FOREIGN KEY (eth_node_id) REFERENCES public.eth_nodes(id) ON DELETE CASCADE;
-
-
--
-- PostgreSQL database dump complete
--
diff --git a/documentation/data-syncing.md b/documentation/data-syncing.md
index 52785ba5..42803080 100644
--- a/documentation/data-syncing.md
+++ b/documentation/data-syncing.md
@@ -25,57 +25,3 @@ different from what we have already stored in the database, the header record wi
ipcPath =
```
- Alternatively, the ipc path can be passed as a flag instead `--client-ipcPath`.
-
-## fullSync
-Syncs blocks, transactions, receipts and logs from a running Ethereum node into VulcanizeDB tables named
-`blocks`, `uncles`, `full_sync_transactions`, `full_sync_receipts` and `logs`.
-- Queries the Ethereum node using RPC calls.
-- Validates headers from the last 15 blocks to ensure that data is up to date.
-- Useful when you want to maintain a broad cache of what's happening on the blockchain.
-- Handles chain reorgs by [validating the most recent blocks' hashes](../pkg/history/header_validator.go). If the hash is
-different from what we have already stored in the database, the header record will be updated.
-
-#### Usage
-- Run `./vulcanizedb fullSync --config --starting-block-number `
-- The config file must be formatted as follows, and should contain an ipc path to a running Ethereum node:
-```toml
-[database]
- name = "vulcanize_public"
- hostname = "localhost"
- user = "vulcanize"
- password = "vulcanize"
- port = 5432
-
-[client]
- ipcPath =
-```
-- Alternatively, the ipc path can be passed as a flag instead `--client-ipcPath`.
-
-*Please note, that if you are fast syncing your Ethereum node, wait for the initial sync to finish.*
-
-## coldImport
-Syncs VulcanizeDB from Geth's underlying LevelDB datastore and persists Ethereum blocks,
-transactions, receipts and logs into VulcanizeDB tables named `blocks`, `uncles`,
-`full_sync_transactions`, `full_sync_receipts` and `logs` respectively.
-
-#### Usage
-1. Ensure the Ethereum node you're point at is not running, and that it has synced to the desired block height.
-1. Run `./vulcanizedb coldImport --config `
-1. Optional flags:
- - `--starting-block-number `/`-s `: block number to start syncing from
- - `--ending-block-number `/`-e `: block number to sync to
- - `--all`/`-a`: sync all missing blocks
-
-The config file can be formatted as follows, and must contain the LevelDB path.
-
-```toml
-[database]
- name = "vulcanize_public"
- hostname = "localhost"
- user = "vulcanize"
- password = "vulcanize"
- port = 5432
-
-[client]
- leveldbpath = "/Users/user/Library/Ethereum/geth/chaindata"
-```
diff --git a/environments/infura.toml b/environments/infura.toml
deleted file mode 100644
index fb9876fb..00000000
--- a/environments/infura.toml
+++ /dev/null
@@ -1,7 +0,0 @@
-[database]
- name = "vulcanize_public"
- hostname = "localhost"
- port = 5432
-
-[client]
- ipcPath = ""
diff --git a/environments/superNode.toml b/environments/superNode.toml
deleted file mode 100644
index 39391864..00000000
--- a/environments/superNode.toml
+++ /dev/null
@@ -1,25 +0,0 @@
-[superNode]
- chain = "ethereum"
- ipfsPath = "/root/.ipfs"
-
- [superNode.database]
- name = "vulcanize_public"
- hostname = "localhost"
- port = 5432
- user = "ec2-user"
-
- [superNode.sync]
- on = true
- wsPath = "ws://127.0.0.1:8546"
- workers = 1
-
- [superNode.server]
- on = true
- ipcPath = "/root/.vulcanize/vulcanize.ipc"
- wsPath = "127.0.0.1:8080"
-
- [superNode.backFill]
- on = true
- httpPath = "http://127.0.0.1:8545"
- frequency = 5
- batchSize = 50
\ No newline at end of file
diff --git a/integration_test/contract_watcher_full_transformer_test.go b/integration_test/contract_watcher_full_transformer_test.go
deleted file mode 100644
index 0f035b5d..00000000
--- a/integration_test/contract_watcher_full_transformer_test.go
+++ /dev/null
@@ -1,338 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package integration
-
-import (
- "fmt"
- "math/rand"
- "strings"
- "time"
-
- "github.com/ethereum/go-ethereum/common"
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
-
- "github.com/vulcanize/vulcanizedb/pkg/config"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/full/transformer"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/constants"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks"
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
-)
-
-var _ = Describe("contractWatcher full transformer", func() {
- var db *postgres.DB
- var err error
- var blockChain core.BlockChain
- var blockRepository repositories.BlockRepository
- var ensAddr = strings.ToLower(constants.EnsContractAddress)
- var tusdAddr = strings.ToLower(constants.TusdContractAddress)
- rand.Seed(time.Now().UnixNano())
-
- BeforeEach(func() {
- db, blockChain = test_helpers.SetupDBandBC()
- blockRepository = *repositories.NewBlockRepository(db)
- })
-
- AfterEach(func() {
- test_helpers.TearDown(db)
- })
-
- Describe("Init", func() {
- It("Initializes transformer's contract objects", func() {
- _, insertErr := blockRepository.CreateOrUpdateBlock(mocks.TransferBlock1)
- Expect(insertErr).NotTo(HaveOccurred())
- _, insertErrTwo := blockRepository.CreateOrUpdateBlock(mocks.TransferBlock2)
- Expect(insertErrTwo).NotTo(HaveOccurred())
- t := transformer.NewTransformer(test_helpers.TusdConfig, blockChain, db)
- err = t.Init()
- Expect(err).ToNot(HaveOccurred())
-
- c, ok := t.Contracts[tusdAddr]
- Expect(ok).To(Equal(true))
-
- Expect(c.StartingBlock).To(Equal(int64(6194633)))
- Expect(c.Abi).To(Equal(constants.TusdAbiString))
- Expect(c.Name).To(Equal("TrueUSD"))
- Expect(c.Address).To(Equal(tusdAddr))
- })
-
- It("Fails to initialize if first and most recent blocks cannot be fetched from vDB", func() {
- t := transformer.NewTransformer(test_helpers.TusdConfig, blockChain, db)
- err = t.Init()
- Expect(err).To(HaveOccurred())
- Expect(err.Error()).To(ContainSubstring("no rows in result set"))
- })
-
- It("Does nothing if watched events are unset", func() {
- _, insertErr := blockRepository.CreateOrUpdateBlock(mocks.TransferBlock1)
- Expect(insertErr).NotTo(HaveOccurred())
- _, insertErrTwo := blockRepository.CreateOrUpdateBlock(mocks.TransferBlock2)
- Expect(insertErrTwo).NotTo(HaveOccurred())
- var testConf config.ContractConfig
- testConf = test_helpers.TusdConfig
- testConf.Events = nil
- t := transformer.NewTransformer(testConf, blockChain, db)
- err = t.Init()
- Expect(err).To(HaveOccurred())
- Expect(err.Error()).To(ContainSubstring("no filters created"))
-
- _, ok := t.Contracts[tusdAddr]
- Expect(ok).To(Equal(false))
- })
- })
-
- Describe("Execute", func() {
- BeforeEach(func() {
- _, insertErr := blockRepository.CreateOrUpdateBlock(mocks.TransferBlock1)
- Expect(insertErr).NotTo(HaveOccurred())
- _, insertErrTwo := blockRepository.CreateOrUpdateBlock(mocks.TransferBlock2)
- Expect(insertErrTwo).NotTo(HaveOccurred())
- })
-
- It("Transforms watched contract data into custom repositories", func() {
- t := transformer.NewTransformer(test_helpers.TusdConfig, blockChain, db)
- err = t.Init()
- Expect(err).ToNot(HaveOccurred())
-
- err = t.Execute()
- Expect(err).ToNot(HaveOccurred())
-
- log := test_helpers.TransferLog{}
- err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.transfer_event WHERE block = 6194634", tusdAddr)).StructScan(&log)
-
- // We don't know vulcID, so compare individual fields instead of complete structures
- Expect(log.Tx).To(Equal("0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654eee"))
- Expect(log.Block).To(Equal(int64(6194634)))
- Expect(log.From).To(Equal("0x000000000000000000000000000000000000Af21"))
- Expect(log.To).To(Equal("0x09BbBBE21a5975cAc061D82f7b843bCE061BA391"))
- Expect(log.Value).To(Equal("1097077688018008265106216665536940668749033598146"))
- })
-
- It("Keeps track of contract-related addresses while transforming event data if they need to be used for later method polling", func() {
- var testConf config.ContractConfig
- testConf = test_helpers.TusdConfig
- testConf.Methods = map[string][]string{
- tusdAddr: {"balanceOf"},
- }
- t := transformer.NewTransformer(testConf, blockChain, db)
- err = t.Init()
- Expect(err).ToNot(HaveOccurred())
-
- c, ok := t.Contracts[tusdAddr]
- Expect(ok).To(Equal(true))
-
- err = t.Execute()
- Expect(err).ToNot(HaveOccurred())
-
- b, ok := c.EmittedAddrs[common.HexToAddress("0x000000000000000000000000000000000000Af21")]
- Expect(ok).To(Equal(true))
- Expect(b).To(Equal(true))
-
- b, ok = c.EmittedAddrs[common.HexToAddress("0x09BbBBE21a5975cAc061D82f7b843bCE061BA391")]
- Expect(ok).To(Equal(true))
- Expect(b).To(Equal(true))
-
- _, ok = c.EmittedAddrs[common.HexToAddress("0x09BbBBE21a5975cAc061D82f7b843b1234567890")]
- Expect(ok).To(Equal(false))
-
- _, ok = c.EmittedAddrs[common.HexToAddress("0x")]
- Expect(ok).To(Equal(false))
-
- _, ok = c.EmittedAddrs[""]
- Expect(ok).To(Equal(false))
-
- _, ok = c.EmittedAddrs[common.HexToAddress("0x09THISE21a5IS5cFAKE1D82fAND43bCE06MADEUP")]
- Expect(ok).To(Equal(false))
- })
-
- It("Polls given methods using generated token holder address", func() {
- var testConf config.ContractConfig
- testConf = test_helpers.TusdConfig
- testConf.Methods = map[string][]string{
- tusdAddr: {"balanceOf"},
- }
- t := transformer.NewTransformer(testConf, blockChain, db)
- err = t.Init()
- Expect(err).ToNot(HaveOccurred())
-
- err = t.Execute()
- Expect(err).ToNot(HaveOccurred())
-
- res := test_helpers.BalanceOf{}
-
- err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.balanceof_method WHERE who_ = '0x000000000000000000000000000000000000Af21' AND block = '6194634'", tusdAddr)).StructScan(&res)
- Expect(err).ToNot(HaveOccurred())
- Expect(res.Balance).To(Equal("0"))
- Expect(res.TokenName).To(Equal("TrueUSD"))
-
- err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.balanceof_method WHERE who_ = '0x09BbBBE21a5975cAc061D82f7b843bCE061BA391' AND block = '6194634'", tusdAddr)).StructScan(&res)
- Expect(err).ToNot(HaveOccurred())
- Expect(res.Balance).To(Equal("0"))
- Expect(res.TokenName).To(Equal("TrueUSD"))
-
- err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.balanceof_method WHERE who_ = '0xfE9e8709d3215310075d67E3ed32A380CCf451C8' AND block = '6194634'", tusdAddr)).StructScan(&res)
- Expect(err).To(HaveOccurred())
- Expect(err.Error()).To(ContainSubstring("no rows in result set"))
- })
-
- It("Fails if initialization has not been done", func() {
- t := transformer.NewTransformer(test_helpers.TusdConfig, blockChain, db)
-
- err = t.Execute()
- Expect(err).To(HaveOccurred())
- Expect(err.Error()).To(ContainSubstring("transformer has no initialized contracts to work with"))
- })
- })
-
- Describe("Execute- against ENS registry contract", func() {
- BeforeEach(func() {
- _, insertErr := blockRepository.CreateOrUpdateBlock(mocks.NewOwnerBlock1)
- Expect(insertErr).NotTo(HaveOccurred())
- _, insertErrTwo := blockRepository.CreateOrUpdateBlock(mocks.NewOwnerBlock2)
- Expect(insertErrTwo).NotTo(HaveOccurred())
- })
-
- It("Transforms watched contract data into custom repositories", func() {
- t := transformer.NewTransformer(test_helpers.ENSConfig, blockChain, db)
-
- err = t.Init()
- Expect(err).ToNot(HaveOccurred())
-
- err = t.Execute()
- Expect(err).ToNot(HaveOccurred())
-
- log := test_helpers.NewOwnerLog{}
- err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.newowner_event", ensAddr)).StructScan(&log)
-
- // We don't know vulcID, so compare individual fields instead of complete structures
- Expect(log.Tx).To(Equal("0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654bbb"))
- Expect(log.Block).To(Equal(int64(6194635)))
- Expect(log.Node).To(Equal("0x0000000000000000000000000000000000000000000000000000c02aaa39b223"))
- Expect(log.Label).To(Equal("0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391"))
- Expect(log.Owner).To(Equal("0x000000000000000000000000000000000000Af21"))
- })
-
- It("Keeps track of contract-related hashes while transforming event data if they need to be used for later method polling", func() {
- var testConf config.ContractConfig
- testConf = test_helpers.ENSConfig
- testConf.Methods = map[string][]string{
- ensAddr: {"owner"},
- }
- t := transformer.NewTransformer(testConf, blockChain, db)
- err = t.Init()
- Expect(err).ToNot(HaveOccurred())
-
- c, ok := t.Contracts[ensAddr]
- Expect(ok).To(Equal(true))
-
- err = t.Execute()
- Expect(err).ToNot(HaveOccurred())
- Expect(len(c.EmittedHashes)).To(Equal(3))
-
- b, ok := c.EmittedHashes[common.HexToHash("0x0000000000000000000000000000000000000000000000000000c02aaa39b223")]
- Expect(ok).To(Equal(true))
- Expect(b).To(Equal(true))
-
- b, ok = c.EmittedHashes[common.HexToHash("0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391")]
- Expect(ok).To(Equal(true))
- Expect(b).To(Equal(true))
-
- // Doesn't keep track of address since it wouldn't be used in calling the 'owner' method
- _, ok = c.EmittedAddrs[common.HexToAddress("0x000000000000000000000000000000000000Af21")]
- Expect(ok).To(Equal(false))
- })
-
- It("Polls given methods using generated token holder address", func() {
- var testConf config.ContractConfig
- testConf = test_helpers.ENSConfig
- testConf.Methods = map[string][]string{
- ensAddr: {"owner"},
- }
- t := transformer.NewTransformer(testConf, blockChain, db)
- err = t.Init()
- Expect(err).ToNot(HaveOccurred())
-
- err = t.Execute()
- Expect(err).ToNot(HaveOccurred())
-
- res := test_helpers.Owner{}
- err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.owner_method WHERE node_ = '0x0000000000000000000000000000000000000000000000000000c02aaa39b223' AND block = '6194636'", ensAddr)).StructScan(&res)
- Expect(err).ToNot(HaveOccurred())
- Expect(res.Address).To(Equal("0x0000000000000000000000000000000000000000"))
- Expect(res.TokenName).To(Equal(""))
-
- err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.owner_method WHERE node_ = '0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391' AND block = '6194636'", ensAddr)).StructScan(&res)
- Expect(err).ToNot(HaveOccurred())
- Expect(res.Address).To(Equal("0x0000000000000000000000000000000000000000"))
- Expect(res.TokenName).To(Equal(""))
-
- err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.owner_method WHERE node_ = '0x9THIS110dcc444fIS242510c09bbAbe21aFAKEcacNODE82f7b843HASH61ba391' AND block = '6194636'", ensAddr)).StructScan(&res)
- Expect(err).To(HaveOccurred())
- Expect(err.Error()).To(ContainSubstring("no rows in result set"))
- })
-
- It("It does not perist events if they do not pass the emitted arg filter", func() {
- var testConf config.ContractConfig
- testConf = test_helpers.ENSConfig
- testConf.EventArgs = map[string][]string{
- ensAddr: {"fake_filter_value"},
- }
- t := transformer.NewTransformer(testConf, blockChain, db)
-
- err = t.Init()
- Expect(err).ToNot(HaveOccurred())
-
- err = t.Execute()
- Expect(err).ToNot(HaveOccurred())
-
- log := test_helpers.HeaderSyncNewOwnerLog{}
- err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.newowner_event", ensAddr)).StructScan(&log)
- Expect(err).To(HaveOccurred())
- Expect(err.Error()).To(ContainSubstring("does not exist"))
- })
-
- It("If a method arg filter is applied, only those arguments are used in polling", func() {
- var testConf config.ContractConfig
- testConf = test_helpers.ENSConfig
- testConf.MethodArgs = map[string][]string{
- ensAddr: {"0x0000000000000000000000000000000000000000000000000000c02aaa39b223"},
- }
- testConf.Methods = map[string][]string{
- ensAddr: {"owner"},
- }
- t := transformer.NewTransformer(testConf, blockChain, db)
- err = t.Init()
- Expect(err).ToNot(HaveOccurred())
-
- err = t.Execute()
- Expect(err).ToNot(HaveOccurred())
-
- res := test_helpers.Owner{}
- err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.owner_method WHERE node_ = '0x0000000000000000000000000000000000000000000000000000c02aaa39b223' AND block = '6194636'", ensAddr)).StructScan(&res)
- Expect(err).ToNot(HaveOccurred())
- Expect(res.Address).To(Equal("0x0000000000000000000000000000000000000000"))
- Expect(res.TokenName).To(Equal(""))
-
- err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.owner_method WHERE node_ = '0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391' AND block = '6194636'", ensAddr)).StructScan(&res)
- Expect(err).To(HaveOccurred())
- Expect(err.Error()).To(ContainSubstring("no rows in result set"))
- })
- })
-})
diff --git a/integration_test/contract_watcher_header_sync_transformer_test.go b/integration_test/contract_watcher_header_sync_transformer_test.go
index 1c816518..0e0d3c9d 100644
--- a/integration_test/contract_watcher_header_sync_transformer_test.go
+++ b/integration_test/contract_watcher_header_sync_transformer_test.go
@@ -30,8 +30,8 @@ import (
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers"
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
var _ = Describe("contractWatcher headerSync transformer", func() {
diff --git a/integration_test/geth_blockchain_test.go b/integration_test/geth_blockchain_test.go
index fb8521b9..79cbf64d 100644
--- a/integration_test/geth_blockchain_test.go
+++ b/integration_test/geth_blockchain_test.go
@@ -27,8 +27,6 @@ import (
"github.com/vulcanize/vulcanizedb/pkg/eth/client"
rpc2 "github.com/vulcanize/vulcanizedb/pkg/eth/converters/rpc"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/fakes"
- "github.com/vulcanize/vulcanizedb/pkg/eth/history"
"github.com/vulcanize/vulcanizedb/pkg/eth/node"
"github.com/vulcanize/vulcanizedb/test_config"
)
@@ -47,19 +45,6 @@ var _ = Describe("Reading from the Geth blockchain", func() {
blockChain = eth.NewBlockChain(blockChainClient, rpcClient, node, transactionConverter)
})
- It("reads two blocks", func(done Done) {
- blocks := fakes.NewMockBlockRepository()
- lastBlock, err := blockChain.LastBlock()
- Expect(err).NotTo(HaveOccurred())
-
- queriedBlocks := []int64{lastBlock.Int64() - 5, lastBlock.Int64() - 6}
- _, err = history.RetrieveAndUpdateBlocks(blockChain, blocks, queriedBlocks)
- Expect(err).NotTo(HaveOccurred())
-
- blocks.AssertCreateOrUpdateBlocksCallCountAndBlockNumbersEquals(2, []int64{lastBlock.Int64() - 5, lastBlock.Int64() - 6})
- close(done)
- }, 30)
-
It("retrieves the genesis block and first block", func(done Done) {
genesisBlock, err := blockChain.GetBlockByNumber(int64(0))
Expect(err).ToNot(HaveOccurred())
diff --git a/integration_test/poller_test.go b/integration_test/poller_test.go
index 386fc837..a1ea02bc 100644
--- a/integration_test/poller_test.go
+++ b/integration_test/poller_test.go
@@ -29,7 +29,7 @@ import (
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/poller"
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
var _ = Describe("Poller", func() {
diff --git a/libraries/shared/factories/event/converter.go b/libraries/shared/factories/event/converter.go
index 48438f5e..beed46a4 100644
--- a/libraries/shared/factories/event/converter.go
+++ b/libraries/shared/factories/event/converter.go
@@ -18,7 +18,7 @@ package event
import (
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
// Converter transforms log data into general InsertionModels the Repository can persist__
diff --git a/libraries/shared/factories/event/repository.go b/libraries/shared/factories/event/repository.go
index 72c039bb..934f9231 100644
--- a/libraries/shared/factories/event/repository.go
+++ b/libraries/shared/factories/event/repository.go
@@ -24,7 +24,7 @@ import (
"github.com/vulcanize/vulcanizedb/utils"
"github.com/sirupsen/logrus"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
const SetLogTransformedQuery = `UPDATE public.header_sync_logs SET transformed = true WHERE id = $1`
diff --git a/libraries/shared/factories/event/repository_test.go b/libraries/shared/factories/event/repository_test.go
index 436bbb1e..88c978b6 100644
--- a/libraries/shared/factories/event/repository_test.go
+++ b/libraries/shared/factories/event/repository_test.go
@@ -24,9 +24,9 @@ import (
. "github.com/onsi/gomega"
"github.com/vulcanize/vulcanizedb/libraries/shared/factories/event"
"github.com/vulcanize/vulcanizedb/libraries/shared/test_data"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
"github.com/vulcanize/vulcanizedb/pkg/eth/fakes"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/test_config"
)
diff --git a/libraries/shared/factories/event/transformer.go b/libraries/shared/factories/event/transformer.go
index 77699b1b..6aad61e0 100644
--- a/libraries/shared/factories/event/transformer.go
+++ b/libraries/shared/factories/event/transformer.go
@@ -20,7 +20,7 @@ import (
"github.com/sirupsen/logrus"
"github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type Transformer struct {
diff --git a/libraries/shared/factories/storage/keys_loader.go b/libraries/shared/factories/storage/keys_loader.go
index e74470e8..012b0361 100644
--- a/libraries/shared/factories/storage/keys_loader.go
+++ b/libraries/shared/factories/storage/keys_loader.go
@@ -19,7 +19,7 @@ package storage
import (
"github.com/ethereum/go-ethereum/common"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type KeysLoader interface {
diff --git a/libraries/shared/factories/storage/keys_lookup.go b/libraries/shared/factories/storage/keys_lookup.go
index 88ac3b65..71277301 100644
--- a/libraries/shared/factories/storage/keys_lookup.go
+++ b/libraries/shared/factories/storage/keys_lookup.go
@@ -19,7 +19,7 @@ package storage
import (
"github.com/ethereum/go-ethereum/common"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type KeysLookup interface {
diff --git a/libraries/shared/factories/storage/repository.go b/libraries/shared/factories/storage/repository.go
index 68877656..07a43a85 100644
--- a/libraries/shared/factories/storage/repository.go
+++ b/libraries/shared/factories/storage/repository.go
@@ -18,7 +18,7 @@ package storage
import (
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type Repository interface {
diff --git a/libraries/shared/factories/storage/transformer.go b/libraries/shared/factories/storage/transformer.go
index 85d1cd9a..6ede4662 100644
--- a/libraries/shared/factories/storage/transformer.go
+++ b/libraries/shared/factories/storage/transformer.go
@@ -20,7 +20,7 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
"github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type Transformer struct {
diff --git a/libraries/shared/mocks/event_converter.go b/libraries/shared/mocks/event_converter.go
index eeb65dce..41172add 100644
--- a/libraries/shared/mocks/event_converter.go
+++ b/libraries/shared/mocks/event_converter.go
@@ -19,7 +19,7 @@ package mocks
import (
"github.com/vulcanize/vulcanizedb/libraries/shared/factories/event"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type MockConverter struct {
diff --git a/libraries/shared/mocks/event_repository.go b/libraries/shared/mocks/event_repository.go
index c69c8a38..c9f4cfb7 100644
--- a/libraries/shared/mocks/event_repository.go
+++ b/libraries/shared/mocks/event_repository.go
@@ -18,7 +18,7 @@ package mocks
import (
"github.com/vulcanize/vulcanizedb/libraries/shared/factories/event"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type MockEventRepository struct {
diff --git a/libraries/shared/mocks/event_transformer.go b/libraries/shared/mocks/event_transformer.go
index dc5ee4e3..44eb3e1b 100644
--- a/libraries/shared/mocks/event_transformer.go
+++ b/libraries/shared/mocks/event_transformer.go
@@ -19,8 +19,8 @@ package mocks
import (
"github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/eth/fakes"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type MockEventTransformer struct {
diff --git a/libraries/shared/mocks/storage_keys_loader.go b/libraries/shared/mocks/storage_keys_loader.go
index a76b9507..747879ad 100644
--- a/libraries/shared/mocks/storage_keys_loader.go
+++ b/libraries/shared/mocks/storage_keys_loader.go
@@ -19,7 +19,7 @@ package mocks
import (
"github.com/ethereum/go-ethereum/common"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type MockStorageKeysLoader struct {
diff --git a/libraries/shared/mocks/storage_keys_lookup.go b/libraries/shared/mocks/storage_keys_lookup.go
index 29b9445c..6c047523 100644
--- a/libraries/shared/mocks/storage_keys_lookup.go
+++ b/libraries/shared/mocks/storage_keys_lookup.go
@@ -19,7 +19,7 @@ package mocks
import (
"github.com/ethereum/go-ethereum/common"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type MockStorageKeysLookup struct {
diff --git a/libraries/shared/mocks/storage_repository.go b/libraries/shared/mocks/storage_repository.go
index 872492dd..1a4c340d 100644
--- a/libraries/shared/mocks/storage_repository.go
+++ b/libraries/shared/mocks/storage_repository.go
@@ -18,7 +18,7 @@ package mocks
import (
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type MockStorageRepository struct {
diff --git a/libraries/shared/mocks/storage_transformer.go b/libraries/shared/mocks/storage_transformer.go
index d81cfdf3..4f5b7a6b 100644
--- a/libraries/shared/mocks/storage_transformer.go
+++ b/libraries/shared/mocks/storage_transformer.go
@@ -21,7 +21,7 @@ import (
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
"github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
// MockStorageTransformer for tests
diff --git a/libraries/shared/repository/address_repository.go b/libraries/shared/repository/address_repository.go
index eb02ddba..9e4ce40b 100644
--- a/libraries/shared/repository/address_repository.go
+++ b/libraries/shared/repository/address_repository.go
@@ -33,7 +33,7 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/jmoiron/sqlx"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
const getOrCreateAddressQuery = `WITH addressId AS (
diff --git a/libraries/shared/repository/address_repository_test.go b/libraries/shared/repository/address_repository_test.go
index bbe8b8d7..7c147ee4 100644
--- a/libraries/shared/repository/address_repository_test.go
+++ b/libraries/shared/repository/address_repository_test.go
@@ -25,8 +25,8 @@ import (
"github.com/vulcanize/vulcanizedb/libraries/shared/repository"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/eth/fakes"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/test_config"
)
diff --git a/libraries/shared/storage/storage_queue.go b/libraries/shared/storage/storage_queue.go
index 4de5a818..f7102170 100644
--- a/libraries/shared/storage/storage_queue.go
+++ b/libraries/shared/storage/storage_queue.go
@@ -18,7 +18,7 @@ package storage
import (
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type IStorageQueue interface {
diff --git a/libraries/shared/storage/storage_queue_test.go b/libraries/shared/storage/storage_queue_test.go
index 076b18aa..4656429f 100644
--- a/libraries/shared/storage/storage_queue_test.go
+++ b/libraries/shared/storage/storage_queue_test.go
@@ -22,8 +22,8 @@ import (
. "github.com/onsi/gomega"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/test_config"
)
diff --git a/libraries/shared/test_data/test_helpers.go b/libraries/shared/test_data/test_helpers.go
index 4caeabdc..2edb66f8 100644
--- a/libraries/shared/test_data/test_helpers.go
+++ b/libraries/shared/test_data/test_helpers.go
@@ -7,8 +7,8 @@ import (
"github.com/ethereum/go-ethereum/core/types"
. "github.com/onsi/gomega"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
// Create a header sync log to reference in an event, returning inserted header sync log
diff --git a/libraries/shared/transactions/syncer.go b/libraries/shared/transactions/syncer.go
index b8c30148..7717331b 100644
--- a/libraries/shared/transactions/syncer.go
+++ b/libraries/shared/transactions/syncer.go
@@ -21,8 +21,8 @@ import (
"github.com/ethereum/go-ethereum/core/types"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type ITransactionsSyncer interface {
diff --git a/libraries/shared/transformer/contract_transformer.go b/libraries/shared/transformer/contract_transformer.go
index 1fc39416..98547c88 100644
--- a/libraries/shared/transformer/contract_transformer.go
+++ b/libraries/shared/transformer/contract_transformer.go
@@ -19,7 +19,7 @@ package transformer
import (
"github.com/vulcanize/vulcanizedb/pkg/config"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type ContractTransformer interface {
diff --git a/libraries/shared/transformer/event_transformer.go b/libraries/shared/transformer/event_transformer.go
index 19d271f3..a724340c 100644
--- a/libraries/shared/transformer/event_transformer.go
+++ b/libraries/shared/transformer/event_transformer.go
@@ -19,7 +19,7 @@ package transformer
import (
"github.com/ethereum/go-ethereum/common"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type EventTransformer interface {
diff --git a/libraries/shared/transformer/storage_transformer.go b/libraries/shared/transformer/storage_transformer.go
index 6811a37b..f2b91ac5 100644
--- a/libraries/shared/transformer/storage_transformer.go
+++ b/libraries/shared/transformer/storage_transformer.go
@@ -19,7 +19,7 @@ package transformer
import (
"github.com/ethereum/go-ethereum/common"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type StorageTransformer interface {
diff --git a/libraries/shared/transformer/super_node_transformer.go b/libraries/shared/transformer/super_node_transformer.go
index a6412947..bda8b6a1 100644
--- a/libraries/shared/transformer/super_node_transformer.go
+++ b/libraries/shared/transformer/super_node_transformer.go
@@ -18,7 +18,7 @@ package transformer
import (
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/pkg/super_node/shared"
)
diff --git a/libraries/shared/watcher/contract_watcher.go b/libraries/shared/watcher/contract_watcher.go
index e942466b..607dad40 100644
--- a/libraries/shared/watcher/contract_watcher.go
+++ b/libraries/shared/watcher/contract_watcher.go
@@ -27,7 +27,7 @@ import (
"github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type ContractWatcher struct {
diff --git a/libraries/shared/watcher/event_watcher.go b/libraries/shared/watcher/event_watcher.go
index 02ca7d7e..57fc12b4 100644
--- a/libraries/shared/watcher/event_watcher.go
+++ b/libraries/shared/watcher/event_watcher.go
@@ -28,8 +28,8 @@ import (
"github.com/vulcanize/vulcanizedb/libraries/shared/transactions"
"github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
const NoNewDataPause = time.Second * 7
@@ -44,7 +44,6 @@ type EventWatcher struct {
func NewEventWatcher(db *postgres.DB, bc core.BlockChain) EventWatcher {
extractor := &logs.LogExtractor{
CheckedHeadersRepository: repositories.NewCheckedHeadersRepository(db),
- CheckedLogsRepository: repositories.NewCheckedLogsRepository(db),
Fetcher: fetcher.NewLogFetcher(bc),
LogRepository: repositories.NewHeaderSyncLogRepository(db),
Syncer: transactions.NewTransactionsSyncer(db, bc),
diff --git a/libraries/shared/watcher/storage_watcher.go b/libraries/shared/watcher/storage_watcher.go
index 3ec3cf84..b7fc0109 100644
--- a/libraries/shared/watcher/storage_watcher.go
+++ b/libraries/shared/watcher/storage_watcher.go
@@ -28,8 +28,8 @@ import (
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
"github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
type IStorageWatcher interface {
diff --git a/pkg/eth/blockchain.go b/pkg/eth/blockchain.go
index fa539906..67f54c18 100644
--- a/pkg/eth/blockchain.go
+++ b/pkg/eth/blockchain.go
@@ -78,7 +78,7 @@ func (blockChain *BlockChain) GetEthLogsWithCustomQuery(query ethereum.FilterQue
func (blockChain *BlockChain) GetHeaderByNumber(blockNumber int64) (header core.Header, err error) {
logrus.Debugf("GetHeaderByNumber called with block %d", blockNumber)
- if blockChain.node.NetworkID == core.KOVAN_NETWORK_ID {
+ if blockChain.node.NetworkID == string(core.KOVAN_NETWORK_ID) {
return blockChain.getPOAHeader(blockNumber)
}
return blockChain.getPOWHeader(blockNumber)
@@ -86,7 +86,7 @@ func (blockChain *BlockChain) GetHeaderByNumber(blockNumber int64) (header core.
func (blockChain *BlockChain) GetHeadersByNumbers(blockNumbers []int64) (header []core.Header, err error) {
logrus.Debug("GetHeadersByNumbers called")
- if blockChain.node.NetworkID == core.KOVAN_NETWORK_ID {
+ if blockChain.node.NetworkID == string(core.KOVAN_NETWORK_ID) {
return blockChain.getPOAHeaders(blockNumbers)
}
return blockChain.getPOWHeaders(blockNumbers)
diff --git a/pkg/eth/blockchain_test.go b/pkg/eth/blockchain_test.go
index ac39bb5a..bfd793cd 100644
--- a/pkg/eth/blockchain_test.go
+++ b/pkg/eth/blockchain_test.go
@@ -102,7 +102,7 @@ var _ = Describe("Geth blockchain", func() {
Describe("POA/Kovan", func() {
It("fetches header from rpcClient", func() {
- node.NetworkID = vulcCore.KOVAN_NETWORK_ID
+ node.NetworkID = string(vulcCore.KOVAN_NETWORK_ID)
blockNumber := hexutil.Big(*big.NewInt(100))
mockRpcClient.SetReturnPOAHeader(vulcCore.POAHeader{Number: &blockNumber})
blockChain = eth.NewBlockChain(mockClient, mockRpcClient, node, fakes.NewMockTransactionConverter())
@@ -114,7 +114,7 @@ var _ = Describe("Geth blockchain", func() {
})
It("returns err if rpcClient returns err", func() {
- node.NetworkID = vulcCore.KOVAN_NETWORK_ID
+ node.NetworkID = string(vulcCore.KOVAN_NETWORK_ID)
mockRpcClient.SetCallContextErr(fakes.FakeError)
blockChain = eth.NewBlockChain(mockClient, mockRpcClient, node, fakes.NewMockTransactionConverter())
@@ -125,7 +125,7 @@ var _ = Describe("Geth blockchain", func() {
})
It("returns error if returned header is empty", func() {
- node.NetworkID = vulcCore.KOVAN_NETWORK_ID
+ node.NetworkID = string(vulcCore.KOVAN_NETWORK_ID)
blockChain = eth.NewBlockChain(mockClient, mockRpcClient, node, fakes.NewMockTransactionConverter())
_, err := blockChain.GetHeaderByNumber(100)
@@ -135,7 +135,7 @@ var _ = Describe("Geth blockchain", func() {
})
It("returns multiple headers with multiple blocknumbers", func() {
- node.NetworkID = vulcCore.KOVAN_NETWORK_ID
+ node.NetworkID = string(vulcCore.KOVAN_NETWORK_ID)
blockNumber := hexutil.Big(*big.NewInt(100))
mockRpcClient.SetReturnPOAHeaders([]vulcCore.POAHeader{{Number: &blockNumber}})
diff --git a/pkg/eth/cold_import/cold_import_suite_test.go b/pkg/eth/cold_import/cold_import_suite_test.go
deleted file mode 100644
index d11dbf00..00000000
--- a/pkg/eth/cold_import/cold_import_suite_test.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package cold_import_test
-
-import (
- "testing"
-
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
-)
-
-func TestColdImport(t *testing.T) {
- RegisterFailHandler(Fail)
- RunSpecs(t, "ColdImport Suite")
-}
diff --git a/pkg/eth/cold_import/importer.go b/pkg/eth/cold_import/importer.go
deleted file mode 100644
index 5248356a..00000000
--- a/pkg/eth/cold_import/importer.go
+++ /dev/null
@@ -1,75 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package cold_import
-
-import (
- "github.com/vulcanize/vulcanizedb/pkg/eth/converters/common"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/ethereum"
-)
-
-type ColdImporter struct {
- blockRepository datastore.BlockRepository
- converter common.BlockConverter
- ethDB ethereum.Database
- receiptRepository datastore.FullSyncReceiptRepository
-}
-
-func NewColdImporter(ethDB ethereum.Database, blockRepository datastore.BlockRepository, receiptRepository datastore.FullSyncReceiptRepository, converter common.BlockConverter) *ColdImporter {
- return &ColdImporter{
- blockRepository: blockRepository,
- converter: converter,
- ethDB: ethDB,
- receiptRepository: receiptRepository,
- }
-}
-
-func (ci *ColdImporter) Execute(startingBlockNumber int64, endingBlockNumber int64, nodeID string) error {
- missingBlocks := ci.blockRepository.MissingBlockNumbers(startingBlockNumber, endingBlockNumber, nodeID)
- for _, n := range missingBlocks {
- hash := ci.ethDB.GetBlockHash(n)
-
- blockID, err := ci.createBlocksAndTransactions(hash, n)
- if err != nil {
- return err
- }
- err = ci.createReceiptsAndLogs(hash, n, blockID)
- if err != nil {
- return err
- }
- }
- ci.blockRepository.SetBlocksStatus(endingBlockNumber)
- return nil
-}
-
-func (ci *ColdImporter) createBlocksAndTransactions(hash []byte, i int64) (int64, error) {
- block := ci.ethDB.GetBlock(hash, i)
- coreBlock, err := ci.converter.ToCoreBlock(block)
- if err != nil {
- return 0, err
- }
- return ci.blockRepository.CreateOrUpdateBlock(coreBlock)
-}
-
-func (ci *ColdImporter) createReceiptsAndLogs(hash []byte, number int64, blockID int64) error {
- receipts := ci.ethDB.GetBlockReceipts(hash, number)
- coreReceipts, err := common.ToCoreReceipts(receipts)
- if err != nil {
- return err
- }
- return ci.receiptRepository.CreateReceiptsAndLogs(blockID, coreReceipts)
-}
diff --git a/pkg/eth/cold_import/importer_test.go b/pkg/eth/cold_import/importer_test.go
deleted file mode 100644
index 030f7703..00000000
--- a/pkg/eth/cold_import/importer_test.go
+++ /dev/null
@@ -1,152 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package cold_import_test
-
-import (
- "github.com/ethereum/go-ethereum/core/types"
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
- "github.com/vulcanize/vulcanizedb/pkg/eth/cold_import"
- vulcCommon "github.com/vulcanize/vulcanizedb/pkg/eth/converters/common"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
- "github.com/vulcanize/vulcanizedb/pkg/eth/fakes"
-)
-
-var _ = Describe("Geth cold importer", func() {
- var fakeGethBlock *types.Block
-
- BeforeEach(func() {
- header := &types.Header{}
- transactions := []*types.Transaction{}
- uncles := []*types.Header{}
- receipts := []*types.Receipt{}
- fakeGethBlock = types.NewBlock(header, transactions, uncles, receipts)
- })
-
- It("only populates missing blocks", func() {
- mockEthereumDatabase := fakes.NewMockEthereumDatabase()
- mockBlockRepository := fakes.NewMockBlockRepository()
- mockReceiptRepository := fakes.NewMockReceiptRepository()
- mockTransactionConverter := fakes.NewMockTransactionConverter()
- blockConverter := vulcCommon.NewBlockConverter(mockTransactionConverter)
-
- nodeId := "node_id"
- startingBlockNumber := int64(120)
- missingBlockNumber := int64(123)
- endingBlockNumber := int64(125)
- fakeHash := []byte{1, 2, 3, 4, 5}
- mockBlockRepository.SetMissingBlockNumbersReturnArray([]int64{missingBlockNumber})
- mockEthereumDatabase.SetReturnHash(fakeHash)
- mockEthereumDatabase.SetReturnBlock(fakeGethBlock)
- importer := cold_import.NewColdImporter(mockEthereumDatabase, mockBlockRepository, mockReceiptRepository, blockConverter)
-
- err := importer.Execute(startingBlockNumber, endingBlockNumber, nodeId)
- Expect(err).NotTo(HaveOccurred())
-
- mockBlockRepository.AssertMissingBlockNumbersCalledWith(startingBlockNumber, endingBlockNumber, nodeId)
- mockEthereumDatabase.AssertGetBlockHashCalledWith(missingBlockNumber)
- mockEthereumDatabase.AssertGetBlockCalledWith(fakeHash, missingBlockNumber)
- })
-
- It("fetches missing blocks from level db and persists them to pg", func() {
- mockEthereumDatabase := fakes.NewMockEthereumDatabase()
- mockBlockRepository := fakes.NewMockBlockRepository()
- mockReceiptRepository := fakes.NewMockReceiptRepository()
- mockTransactionConverter := fakes.NewMockTransactionConverter()
- blockConverter := vulcCommon.NewBlockConverter(mockTransactionConverter)
-
- blockNumber := int64(123)
- fakeHash := []byte{1, 2, 3, 4, 5}
- mockBlockRepository.SetMissingBlockNumbersReturnArray([]int64{blockNumber})
- mockEthereumDatabase.SetReturnHash(fakeHash)
- mockEthereumDatabase.SetReturnBlock(fakeGethBlock)
- importer := cold_import.NewColdImporter(mockEthereumDatabase, mockBlockRepository, mockReceiptRepository, blockConverter)
-
- err := importer.Execute(blockNumber, blockNumber, "node_id")
- Expect(err).NotTo(HaveOccurred())
-
- mockEthereumDatabase.AssertGetBlockHashCalledWith(blockNumber)
- mockEthereumDatabase.AssertGetBlockCalledWith(fakeHash, blockNumber)
- Expect(mockTransactionConverter.ConvertBlockTransactionsToCoreCalled).To(BeTrue())
- Expect(mockTransactionConverter.ConvertBlockTransactionsToCorePassedBlock).To(Equal(fakeGethBlock))
- convertedBlock, err := blockConverter.ToCoreBlock(fakeGethBlock)
- Expect(err).NotTo(HaveOccurred())
- mockBlockRepository.AssertCreateOrUpdateBlockCalledWith(convertedBlock)
- })
-
- It("sets is_final status on populated blocks", func() {
- mockEthereumDatabase := fakes.NewMockEthereumDatabase()
- mockBlockRepository := fakes.NewMockBlockRepository()
- mockReceiptRepository := fakes.NewMockReceiptRepository()
- mockTransactionConverter := fakes.NewMockTransactionConverter()
- blockConverter := vulcCommon.NewBlockConverter(mockTransactionConverter)
-
- startingBlockNumber := int64(120)
- endingBlockNumber := int64(125)
- fakeHash := []byte{1, 2, 3, 4, 5}
- mockBlockRepository.SetMissingBlockNumbersReturnArray([]int64{startingBlockNumber})
- mockEthereumDatabase.SetReturnHash(fakeHash)
- mockEthereumDatabase.SetReturnBlock(fakeGethBlock)
- importer := cold_import.NewColdImporter(mockEthereumDatabase, mockBlockRepository, mockReceiptRepository, blockConverter)
-
- err := importer.Execute(startingBlockNumber, endingBlockNumber, "node_id")
- Expect(err).NotTo(HaveOccurred())
- mockBlockRepository.AssertSetBlockStatusCalledWith(endingBlockNumber)
- })
-
- It("fetches receipts from level db and persists them to pg", func() {
- mockEthereumDatabase := fakes.NewMockEthereumDatabase()
- mockBlockRepository := fakes.NewMockBlockRepository()
- mockReceiptRepository := fakes.NewMockReceiptRepository()
- mockTransactionConverter := fakes.NewMockTransactionConverter()
- blockConverter := vulcCommon.NewBlockConverter(mockTransactionConverter)
-
- blockNumber := int64(123)
- blockId := int64(999)
- mockBlockRepository.SetCreateOrUpdateBlockReturnVals(blockId, nil)
- fakeReceipts := types.Receipts{{}}
- mockBlockRepository.SetMissingBlockNumbersReturnArray([]int64{blockNumber})
- mockEthereumDatabase.SetReturnBlock(fakeGethBlock)
- mockEthereumDatabase.SetReturnReceipts(fakeReceipts)
- importer := cold_import.NewColdImporter(mockEthereumDatabase, mockBlockRepository, mockReceiptRepository, blockConverter)
-
- err := importer.Execute(blockNumber, blockNumber, "node_id")
- Expect(err).NotTo(HaveOccurred())
- expectedReceipts, err := vulcCommon.ToCoreReceipts(fakeReceipts)
- Expect(err).ToNot(HaveOccurred())
- mockReceiptRepository.AssertCreateReceiptsAndLogsCalledWith(blockId, expectedReceipts)
- })
-
- It("does not fetch receipts if block already exists", func() {
- mockEthereumDatabase := fakes.NewMockEthereumDatabase()
- mockBlockRepository := fakes.NewMockBlockRepository()
- mockReceiptRepository := fakes.NewMockReceiptRepository()
- mockTransactionConverter := fakes.NewMockTransactionConverter()
- blockConverter := vulcCommon.NewBlockConverter(mockTransactionConverter)
-
- blockNumber := int64(123)
- mockBlockRepository.SetMissingBlockNumbersReturnArray([]int64{})
- mockEthereumDatabase.SetReturnBlock(fakeGethBlock)
- mockBlockRepository.SetCreateOrUpdateBlockReturnVals(0, repositories.ErrBlockExists)
- importer := cold_import.NewColdImporter(mockEthereumDatabase, mockBlockRepository, mockReceiptRepository, blockConverter)
-
- err := importer.Execute(blockNumber, blockNumber, "node_id")
-
- Expect(err).NotTo(HaveOccurred())
- mockReceiptRepository.AssertCreateReceiptsAndLogsNotCalled()
- })
-})
diff --git a/pkg/eth/cold_import/node_builder.go b/pkg/eth/cold_import/node_builder.go
deleted file mode 100644
index 667d6200..00000000
--- a/pkg/eth/cold_import/node_builder.go
+++ /dev/null
@@ -1,85 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package cold_import
-
-import (
- "errors"
- "strings"
-
- "github.com/ethereum/go-ethereum/common"
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/crypto"
- "github.com/vulcanize/vulcanizedb/pkg/fs"
-)
-
-const (
- ColdImportClientName = "LevelDbColdImport"
- ColdImportNetworkID float64 = 1
-)
-
-var (
- NoChainDataErr = errors.New("Level DB path does not include chaindata extension.")
- NoGethRootErr = errors.New("Level DB path does not include root path to geth.")
-)
-
-type ColdImportNodeBuilder struct {
- reader fs.Reader
- parser crypto.PublicKeyParser
-}
-
-func NewColdImportNodeBuilder(reader fs.Reader, parser crypto.PublicKeyParser) ColdImportNodeBuilder {
- return ColdImportNodeBuilder{reader: reader, parser: parser}
-}
-
-func (cinb ColdImportNodeBuilder) GetNode(genesisBlock []byte, levelPath string) (core.Node, error) {
- var coldNode core.Node
- nodeKeyPath, err := getNodeKeyPath(levelPath)
- if err != nil {
- return coldNode, err
- }
- nodeKey, err := cinb.reader.Read(nodeKeyPath)
- if err != nil {
- return coldNode, err
- }
- nodeID, err := cinb.parser.ParsePublicKey(string(nodeKey))
- if err != nil {
- return coldNode, err
- }
- genesisBlockHash := common.BytesToHash(genesisBlock).String()
- coldNode = core.Node{
- GenesisBlock: genesisBlockHash,
- NetworkID: ColdImportNetworkID,
- ID: nodeID,
- ClientName: ColdImportClientName,
- }
- return coldNode, nil
-}
-
-func getNodeKeyPath(levelPath string) (string, error) {
- chaindataExtension := "chaindata"
- if !strings.Contains(levelPath, chaindataExtension) {
- return "", NoChainDataErr
- }
- chaindataExtensionLength := len(chaindataExtension)
- gethRootPathLength := len(levelPath) - chaindataExtensionLength
- if gethRootPathLength <= chaindataExtensionLength {
- return "", NoGethRootErr
- }
- gethRootPath := levelPath[:gethRootPathLength]
- nodeKeyPath := gethRootPath + "nodekey"
- return nodeKeyPath, nil
-}
diff --git a/pkg/eth/cold_import/node_builder_test.go b/pkg/eth/cold_import/node_builder_test.go
deleted file mode 100644
index 1b33e52b..00000000
--- a/pkg/eth/cold_import/node_builder_test.go
+++ /dev/null
@@ -1,114 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package cold_import_test
-
-import (
- "errors"
-
- "github.com/ethereum/go-ethereum/common"
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
- "github.com/vulcanize/vulcanizedb/pkg/eth/cold_import"
- "github.com/vulcanize/vulcanizedb/pkg/eth/fakes"
-)
-
-var _ = Describe("Cold importer node builder", func() {
- Describe("when level path is not valid", func() {
- It("returns error if no chaindata extension", func() {
- gethPath := "path/to/geth"
- mockReader := fakes.NewMockFsReader()
- mockParser := fakes.NewMockCryptoParser()
- nodeBuilder := cold_import.NewColdImportNodeBuilder(mockReader, mockParser)
-
- _, err := nodeBuilder.GetNode([]byte{1, 2, 3, 4, 5}, gethPath)
-
- Expect(err).To(HaveOccurred())
- Expect(err).To(MatchError(cold_import.NoChainDataErr))
- })
-
- It("returns error if no root geth path", func() {
- chaindataPath := "chaindata"
- mockReader := fakes.NewMockFsReader()
- mockParser := fakes.NewMockCryptoParser()
- nodeBuilder := cold_import.NewColdImportNodeBuilder(mockReader, mockParser)
-
- _, err := nodeBuilder.GetNode([]byte{1, 2, 3, 4, 5}, chaindataPath)
-
- Expect(err).To(HaveOccurred())
- Expect(err).To(MatchError(cold_import.NoGethRootErr))
- })
- })
-
- Describe("when reader fails", func() {
- It("returns err", func() {
- mockReader := fakes.NewMockFsReader()
- fakeError := errors.New("Failed")
- mockReader.SetReturnErr(fakeError)
- mockParser := fakes.NewMockCryptoParser()
- nodeBuilder := cold_import.NewColdImportNodeBuilder(mockReader, mockParser)
-
- _, err := nodeBuilder.GetNode([]byte{1, 2, 3, 4, 5}, "path/to/geth/chaindata")
-
- Expect(err).To(HaveOccurred())
- Expect(err).To(MatchError(fakeError))
- })
- })
-
- Describe("when parser fails", func() {
- It("returns err", func() {
- mockReader := fakes.NewMockFsReader()
- mockParser := fakes.NewMockCryptoParser()
- fakeErr := errors.New("Failed")
- mockParser.SetReturnErr(fakeErr)
- nodeBuilder := cold_import.NewColdImportNodeBuilder(mockReader, mockParser)
-
- _, err := nodeBuilder.GetNode([]byte{1, 2, 3, 4, 5}, "path/to/geth/chaindata")
-
- Expect(err).To(HaveOccurred())
- Expect(err).To(MatchError(fakeErr))
- })
- })
-
- Describe("when path is valid and reader and parser succeed", func() {
- It("builds a node", func() {
- fakeGenesisBlock := []byte{1, 2, 3, 4, 5}
- fakeRootGethPath := "root/path/to/geth/"
- fakeLevelPath := fakeRootGethPath + "chaindata"
- fakeNodeKeyPath := fakeRootGethPath + "nodekey"
- fakePublicKeyBytes := []byte{5, 4, 3, 2, 1}
- fakePublicKeyString := "public_key"
- mockReader := fakes.NewMockFsReader()
- mockReader.SetReturnBytes(fakePublicKeyBytes)
- mockParser := fakes.NewMockCryptoParser()
- mockParser.SetReturnVal(fakePublicKeyString)
- nodeBuilder := cold_import.NewColdImportNodeBuilder(mockReader, mockParser)
-
- result, err := nodeBuilder.GetNode(fakeGenesisBlock, fakeLevelPath)
-
- Expect(err).NotTo(HaveOccurred())
- mockReader.AssertReadCalledWith(fakeNodeKeyPath)
- mockParser.AssertParsePublicKeyCalledWith(string(fakePublicKeyBytes))
- Expect(result).NotTo(BeNil())
- Expect(result.ClientName).To(Equal(cold_import.ColdImportClientName))
- expectedGenesisBlock := common.BytesToHash(fakeGenesisBlock).String()
- Expect(result.GenesisBlock).To(Equal(expectedGenesisBlock))
- Expect(result.ID).To(Equal(fakePublicKeyString))
- Expect(result.NetworkID).To(Equal(cold_import.ColdImportNetworkID))
- })
- })
-
-})
diff --git a/pkg/eth/contract_watcher/full/converter/converter.go b/pkg/eth/contract_watcher/full/converter/converter.go
deleted file mode 100644
index 0b231276..00000000
--- a/pkg/eth/contract_watcher/full/converter/converter.go
+++ /dev/null
@@ -1,117 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package converter
-
-import (
- "fmt"
- "math/big"
- "strconv"
-
- "github.com/ethereum/go-ethereum/accounts/abi/bind"
- "github.com/ethereum/go-ethereum/common"
- "github.com/ethereum/go-ethereum/common/hexutil"
-
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types"
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
-)
-
-// ConverterInterface is used to convert watched event logs to
-// custom logs containing event input name => value maps
-type ConverterInterface interface {
- Convert(watchedEvent core.WatchedEvent, event types.Event) (*types.Log, error)
- Update(info *contract.Contract)
-}
-
-// Converter is the underlying struct for the ConverterInterface
-type Converter struct {
- ContractInfo *contract.Contract
-}
-
-// Update configures the converter for a specific contract
-func (c *Converter) Update(info *contract.Contract) {
- c.ContractInfo = info
-}
-
-// Convert converts the given watched event log into a types.Log for the given event
-func (c *Converter) Convert(watchedEvent core.WatchedEvent, event types.Event) (*types.Log, error) {
- boundContract := bind.NewBoundContract(common.HexToAddress(c.ContractInfo.Address), c.ContractInfo.ParsedAbi, nil, nil, nil)
- values := make(map[string]interface{})
- log := helpers.ConvertToLog(watchedEvent)
- err := boundContract.UnpackLogIntoMap(values, event.Name, log)
- if err != nil {
- return nil, err
- }
-
- strValues := make(map[string]string, len(values))
- seenAddrs := make([]interface{}, 0, len(values))
- seenHashes := make([]interface{}, 0, len(values))
- for fieldName, input := range values {
- // Postgres cannot handle custom types, resolve to strings
- switch input.(type) {
- case *big.Int:
- b := input.(*big.Int)
- strValues[fieldName] = b.String()
- case common.Address:
- a := input.(common.Address)
- strValues[fieldName] = a.String()
- seenAddrs = append(seenAddrs, a)
- case common.Hash:
- h := input.(common.Hash)
- strValues[fieldName] = h.String()
- seenHashes = append(seenHashes, h)
- case string:
- strValues[fieldName] = input.(string)
- case bool:
- strValues[fieldName] = strconv.FormatBool(input.(bool))
- case []byte:
- b := input.([]byte)
- strValues[fieldName] = hexutil.Encode(b)
- if len(b) == 32 { // collect byte arrays of size 32 as hashes
- seenHashes = append(seenHashes, common.HexToHash(strValues[fieldName]))
- }
- case byte:
- b := input.(byte)
- strValues[fieldName] = string(b)
- default:
- return nil, fmt.Errorf("error: unhandled abi type %T", input)
- }
- }
-
- // Only hold onto logs that pass our address filter, if any
- if c.ContractInfo.PassesEventFilter(strValues) {
- eventLog := &types.Log{
- ID: watchedEvent.LogID,
- Values: strValues,
- Block: watchedEvent.BlockNumber,
- Tx: watchedEvent.TxHash,
- }
-
- // Cache emitted values if their caching is turned on
- if c.ContractInfo.EmittedAddrs != nil {
- c.ContractInfo.AddEmittedAddr(seenAddrs...)
- }
- if c.ContractInfo.EmittedHashes != nil {
- c.ContractInfo.AddEmittedHash(seenHashes...)
- }
-
- return eventLog, nil
- }
-
- return nil, nil
-}
diff --git a/pkg/eth/contract_watcher/full/converter/converter_suite_test.go b/pkg/eth/contract_watcher/full/converter/converter_suite_test.go
deleted file mode 100644
index e8cb72e6..00000000
--- a/pkg/eth/contract_watcher/full/converter/converter_suite_test.go
+++ /dev/null
@@ -1,35 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package converter_test
-
-import (
- "io/ioutil"
- "log"
- "testing"
-
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
-)
-
-func TestConverter(t *testing.T) {
- RegisterFailHandler(Fail)
- RunSpecs(t, "Full Converter Suite Test")
-}
-
-var _ = BeforeSuite(func() {
- log.SetOutput(ioutil.Discard)
-})
diff --git a/pkg/eth/contract_watcher/full/converter/converter_test.go b/pkg/eth/contract_watcher/full/converter/converter_test.go
deleted file mode 100644
index 9d820e64..00000000
--- a/pkg/eth/contract_watcher/full/converter/converter_test.go
+++ /dev/null
@@ -1,113 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package converter_test
-
-import (
- "github.com/ethereum/go-ethereum/common"
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
-
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/full/converter"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks"
-)
-
-var _ = Describe("Converter", func() {
- var con *contract.Contract
- var wantedEvents = []string{"Transfer"}
- var err error
-
- BeforeEach(func() {
- con = test_helpers.SetupTusdContract(wantedEvents, []string{"balanceOf"})
- })
-
- Describe("Update", func() {
- It("Updates contract con held by the converter", func() {
- c := converter.Converter{}
- c.Update(con)
- Expect(c.ContractInfo).To(Equal(con))
-
- con := test_helpers.SetupTusdContract([]string{}, []string{})
- c.Update(con)
- Expect(c.ContractInfo).To(Equal(con))
- })
- })
-
- Describe("Convert", func() {
- It("Converts a watched event log to mapping of event input names to values", func() {
- _, ok := con.Events["Approval"]
- Expect(ok).To(Equal(false))
-
- event, ok := con.Events["Transfer"]
- Expect(ok).To(Equal(true))
- err = con.GenerateFilters()
- Expect(err).ToNot(HaveOccurred())
-
- c := converter.Converter{}
- c.Update(con)
- log, err := c.Convert(mocks.MockTranferEvent, event)
- Expect(err).ToNot(HaveOccurred())
-
- from := common.HexToAddress("0x000000000000000000000000000000000000000000000000000000000000af21")
- to := common.HexToAddress("0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391")
- value := helpers.BigFromString("1097077688018008265106216665536940668749033598146")
-
- v := log.Values["value"]
-
- Expect(log.Values["to"]).To(Equal(to.String()))
- Expect(log.Values["from"]).To(Equal(from.String()))
- Expect(v).To(Equal(value.String()))
- })
-
- It("Keeps track of addresses it sees to grow a token holder address list for the contract", func() {
- event, ok := con.Events["Transfer"]
- Expect(ok).To(Equal(true))
-
- c := converter.Converter{}
- c.Update(con)
- _, err := c.Convert(mocks.MockTranferEvent, event)
- Expect(err).ToNot(HaveOccurred())
-
- b, ok := con.EmittedAddrs[common.HexToAddress("0x000000000000000000000000000000000000Af21")]
- Expect(ok).To(Equal(true))
- Expect(b).To(Equal(true))
-
- b, ok = con.EmittedAddrs[common.HexToAddress("0x09BbBBE21a5975cAc061D82f7b843bCE061BA391")]
- Expect(ok).To(Equal(true))
- Expect(b).To(Equal(true))
-
- _, ok = con.EmittedAddrs[common.HexToAddress("0x")]
- Expect(ok).To(Equal(false))
-
- _, ok = con.EmittedAddrs[""]
- Expect(ok).To(Equal(false))
-
- _, ok = con.EmittedAddrs[common.HexToAddress("0x09THISE21a5IS5cFAKE1D82fAND43bCE06MADEUP")]
- Expect(ok).To(Equal(false))
- })
-
- It("Fails with an empty contract", func() {
- event := con.Events["Transfer"]
- c := converter.Converter{}
- c.Update(&contract.Contract{})
- _, err = c.Convert(mocks.MockTranferEvent, event)
- Expect(err).To(HaveOccurred())
- })
- })
-})
diff --git a/pkg/eth/contract_watcher/full/retriever/block_retriever.go b/pkg/eth/contract_watcher/full/retriever/block_retriever.go
deleted file mode 100644
index 30424dea..00000000
--- a/pkg/eth/contract_watcher/full/retriever/block_retriever.go
+++ /dev/null
@@ -1,99 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package retriever
-
-import (
- "database/sql"
-
- "github.com/vulcanize/vulcanizedb/libraries/shared/repository"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
-)
-
-// BlockRetriever is used to retrieve the first block for a given contract and the most recent block
-// It requires a vDB synced database with blocks, transactions, receipts, and logs
-type BlockRetriever interface {
- RetrieveFirstBlock(contractAddr string) (int64, error)
- RetrieveMostRecentBlock() (int64, error)
-}
-
-type blockRetriever struct {
- db *postgres.DB
-}
-
-// NewBlockRetriever returns a new BlockRetriever
-func NewBlockRetriever(db *postgres.DB) BlockRetriever {
- return &blockRetriever{
- db: db,
- }
-}
-
-// RetrieveFirstBlock fetches the block number for the earliest block in the db
-// Tries both methods of finding the first block, with the receipt method taking precedence
-func (r *blockRetriever) RetrieveFirstBlock(contractAddr string) (int64, error) {
- i, err := r.retrieveFirstBlockFromReceipts(contractAddr)
- if err != nil {
- if err == sql.ErrNoRows {
- i, err = r.retrieveFirstBlockFromLogs(contractAddr)
- }
- return i, err
- }
-
- return i, err
-}
-
-// For some contracts the contract creation transaction receipt doesn't have the contract address so this doesn't work (e.g. Sai)
-func (r *blockRetriever) retrieveFirstBlockFromReceipts(contractAddr string) (int64, error) {
- var firstBlock int64
- addressID, getAddressErr := repository.GetOrCreateAddress(r.db, contractAddr)
- if getAddressErr != nil {
- return firstBlock, getAddressErr
- }
- err := r.db.Get(
- &firstBlock,
- `SELECT number FROM eth_blocks
- WHERE id = (SELECT block_id FROM full_sync_receipts
- WHERE contract_address_id = $1
- ORDER BY block_id ASC
- LIMIT 1)`,
- addressID,
- )
-
- return firstBlock, err
-}
-
-// In which case this servers as a heuristic to find the first block by finding the first contract event log
-func (r *blockRetriever) retrieveFirstBlockFromLogs(contractAddr string) (int64, error) {
- var firstBlock int
- err := r.db.Get(
- &firstBlock,
- "SELECT block_number FROM full_sync_logs WHERE lower(address) = $1 ORDER BY block_number ASC LIMIT 1",
- contractAddr,
- )
-
- return int64(firstBlock), err
-}
-
-// RetrieveMostRecentBlock retrieves the most recent block number in vDB
-func (r *blockRetriever) RetrieveMostRecentBlock() (int64, error) {
- var lastBlock int64
- err := r.db.Get(
- &lastBlock,
- "SELECT number FROM eth_blocks ORDER BY number DESC LIMIT 1",
- )
-
- return lastBlock, err
-}
diff --git a/pkg/eth/contract_watcher/full/retriever/block_retriever_test.go b/pkg/eth/contract_watcher/full/retriever/block_retriever_test.go
deleted file mode 100644
index 290f5746..00000000
--- a/pkg/eth/contract_watcher/full/retriever/block_retriever_test.go
+++ /dev/null
@@ -1,259 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package retriever_test
-
-import (
- "strings"
-
- "github.com/ethereum/go-ethereum/core/types"
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
-
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/full/retriever"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/constants"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers"
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
-)
-
-var _ = Describe("Block Retriever", func() {
- var db *postgres.DB
- var r retriever.BlockRetriever
- var rawTransaction []byte
- var blockRepository repositories.BlockRepository
-
- // Contains no contract address
- var block1 = core.Block{
- Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad123ert",
- Number: 1,
- Transactions: []core.TransactionModel{},
- }
-
- BeforeEach(func() {
- db, _ = test_helpers.SetupDBandBC()
- blockRepository = *repositories.NewBlockRepository(db)
- r = retriever.NewBlockRetriever(db)
- gethTransaction := types.Transaction{}
- var err error
- rawTransaction, err = gethTransaction.MarshalJSON()
- Expect(err).NotTo(HaveOccurred())
- })
-
- AfterEach(func() {
- test_helpers.TearDown(db)
- })
-
- Describe("RetrieveFirstBlock", func() {
- It("Retrieves block number where contract first appears in receipt, if available", func() {
- // Contains the address in the receipt
- block2 := core.Block{
- Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad123ert",
- Number: 2,
- Transactions: []core.TransactionModel{{
- Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546ae",
- GasPrice: 0,
- GasLimit: 0,
- Nonce: 0,
- Raw: rawTransaction,
- Receipt: core.Receipt{
- TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546ae",
- ContractAddress: constants.TusdContractAddress,
- Logs: []core.FullSyncLog{},
- },
- TxIndex: 0,
- Value: "0",
- }},
- }
-
- // Contains address in logs
- block3 := core.Block{
- Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad456yui",
- Number: 3,
- Transactions: []core.TransactionModel{{
- Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad234hfs",
- GasPrice: 0,
- GasLimit: 0,
- Nonce: 0,
- Raw: rawTransaction,
- Receipt: core.Receipt{
- TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad234hfs",
- ContractAddress: constants.TusdContractAddress,
- Logs: []core.FullSyncLog{{
- BlockNumber: 3,
- TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad234hfs",
- Address: constants.TusdContractAddress,
- Topics: core.Topics{
- constants.TransferEvent.Signature(),
- "0x000000000000000000000000000000000000000000000000000000000000af21",
- "0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391",
- "",
- },
- Index: 1,
- Data: "0x000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000089d24a6b4ccb1b6faa2625fe562bdd9a23260359000000000000000000000000000000000000000000000000392d2e2bda9c00000000000000000000000000000000000000000000000000927f41fa0a4a418000000000000000000000000000000000000000000000000000000000005adcfebe",
- }},
- },
- TxIndex: 0,
- Value: "0",
- }},
- }
-
- _, insertErrOne := blockRepository.CreateOrUpdateBlock(block1)
- Expect(insertErrOne).NotTo(HaveOccurred())
- _, insertErrTwo := blockRepository.CreateOrUpdateBlock(block2)
- Expect(insertErrTwo).NotTo(HaveOccurred())
- _, insertErrThree := blockRepository.CreateOrUpdateBlock(block3)
- Expect(insertErrThree).NotTo(HaveOccurred())
-
- i, err := r.RetrieveFirstBlock(strings.ToLower(constants.TusdContractAddress))
- Expect(err).NotTo(HaveOccurred())
- Expect(i).To(Equal(int64(2)))
- })
-
- It("Retrieves block number where contract first appears in event logs if it cannot find the address in a receipt", func() {
- block2 := core.Block{
- Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad123ert",
- Number: 2,
- Transactions: []core.TransactionModel{{
- Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546ae",
- GasPrice: 0,
- GasLimit: 0,
- Nonce: 0,
- Raw: rawTransaction,
- Receipt: core.Receipt{
- TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546ae",
- ContractAddress: "",
- Logs: []core.FullSyncLog{{
- BlockNumber: 2,
- TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546ae",
- Address: constants.DaiContractAddress,
- Topics: core.Topics{
- constants.TransferEvent.Signature(),
- "0x000000000000000000000000000000000000000000000000000000000000af21",
- "0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391",
- "",
- },
- Index: 1,
- Data: "0x000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000089d24a6b4ccb1b6faa2625fe562bdd9a23260359000000000000000000000000000000000000000000000000392d2e2bda9c00000000000000000000000000000000000000000000000000927f41fa0a4a418000000000000000000000000000000000000000000000000000000000005adcfebe",
- }},
- },
- TxIndex: 0,
- Value: "0",
- }},
- }
-
- block3 := core.Block{
- Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad456yui",
- Number: 3,
- Transactions: []core.TransactionModel{{
- Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad234hfs",
- GasPrice: 0,
- GasLimit: 0,
- Nonce: 0,
- Raw: rawTransaction,
- Receipt: core.Receipt{
- TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad234hfs",
- ContractAddress: "",
- Logs: []core.FullSyncLog{{
- BlockNumber: 3,
- TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad234hfs",
- Address: constants.DaiContractAddress,
- Topics: core.Topics{
- constants.TransferEvent.Signature(),
- "0x000000000000000000000000000000000000000000000000000000000000af21",
- "0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391",
- "",
- },
- Index: 1,
- Data: "0x000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000089d24a6b4ccb1b6faa2625fe562bdd9a23260359000000000000000000000000000000000000000000000000392d2e2bda9c00000000000000000000000000000000000000000000000000927f41fa0a4a418000000000000000000000000000000000000000000000000000000000005adcfebe",
- }},
- },
- TxIndex: 0,
- Value: "0",
- }},
- }
-
- _, insertErrOne := blockRepository.CreateOrUpdateBlock(block1)
- Expect(insertErrOne).NotTo(HaveOccurred())
- _, insertErrTwo := blockRepository.CreateOrUpdateBlock(block2)
- Expect(insertErrTwo).NotTo(HaveOccurred())
- _, insertErrThree := blockRepository.CreateOrUpdateBlock(block3)
- Expect(insertErrThree).NotTo(HaveOccurred())
-
- i, err := r.RetrieveFirstBlock(constants.DaiContractAddress)
- Expect(err).NotTo(HaveOccurred())
- Expect(i).To(Equal(int64(2)))
- })
-
- It("Fails if the contract address cannot be found in any blocks", func() {
- block2 := core.Block{
- Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad123ert",
- Number: 2,
- Transactions: []core.TransactionModel{},
- }
-
- block3 := core.Block{
- Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad456yui",
- Number: 3,
- Transactions: []core.TransactionModel{},
- }
-
- _, insertErrOne := blockRepository.CreateOrUpdateBlock(block1)
- Expect(insertErrOne).NotTo(HaveOccurred())
- _, insertErrTwo := blockRepository.CreateOrUpdateBlock(block2)
- Expect(insertErrTwo).NotTo(HaveOccurred())
- _, insertErrThree := blockRepository.CreateOrUpdateBlock(block3)
- Expect(insertErrThree).NotTo(HaveOccurred())
-
- _, err := r.RetrieveFirstBlock(constants.DaiContractAddress)
- Expect(err).To(HaveOccurred())
- })
- })
-
- Describe("RetrieveMostRecentBlock", func() {
- It("Retrieves the latest block", func() {
- block2 := core.Block{
- Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad123ert",
- Number: 2,
- Transactions: []core.TransactionModel{},
- }
-
- block3 := core.Block{
- Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad456yui",
- Number: 3,
- Transactions: []core.TransactionModel{},
- }
-
- _, insertErrOne := blockRepository.CreateOrUpdateBlock(block1)
- Expect(insertErrOne).NotTo(HaveOccurred())
- _, insertErrTwo := blockRepository.CreateOrUpdateBlock(block2)
- Expect(insertErrTwo).NotTo(HaveOccurred())
- _, insertErrThree := blockRepository.CreateOrUpdateBlock(block3)
- Expect(insertErrThree).NotTo(HaveOccurred())
-
- i, err := r.RetrieveMostRecentBlock()
- Expect(err).ToNot(HaveOccurred())
- Expect(i).To(Equal(int64(3)))
- })
-
- It("Fails if it cannot retrieve the latest block", func() {
- i, err := r.RetrieveMostRecentBlock()
- Expect(err).To(HaveOccurred())
- Expect(i).To(Equal(int64(0)))
- })
- })
-})
diff --git a/pkg/eth/contract_watcher/full/retriever/retriever_suite_test.go b/pkg/eth/contract_watcher/full/retriever/retriever_suite_test.go
deleted file mode 100644
index 2f97ce0a..00000000
--- a/pkg/eth/contract_watcher/full/retriever/retriever_suite_test.go
+++ /dev/null
@@ -1,35 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package retriever_test
-
-import (
- "io/ioutil"
- "testing"
-
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
- "github.com/sirupsen/logrus"
-)
-
-func TestRetriever(t *testing.T) {
- RegisterFailHandler(Fail)
- RunSpecs(t, "Full Block Number Retriever Suite Test")
-}
-
-var _ = BeforeSuite(func() {
- logrus.SetOutput(ioutil.Discard)
-})
diff --git a/pkg/eth/contract_watcher/full/transformer/transformer.go b/pkg/eth/contract_watcher/full/transformer/transformer.go
deleted file mode 100644
index d3e6a576..00000000
--- a/pkg/eth/contract_watcher/full/transformer/transformer.go
+++ /dev/null
@@ -1,236 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package transformer
-
-import (
- "errors"
-
- "github.com/sirupsen/logrus"
-
- "github.com/vulcanize/vulcanizedb/pkg/config"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/full/converter"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/full/retriever"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/parser"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/poller"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/repository"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types"
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
-)
-
-// Transformer is the top level struct for transforming watched contract data
-// Requires a fully synced vDB and a running eth node (or infura)
-type Transformer struct {
- // Database interfaces
- FilterRepository datastore.FilterRepository // Log filters repo; accepts filters generated by Contract.GenerateFilters()
- WatchedEventRepository datastore.WatchedEventRepository // Watched event log views, created by the log filters
- TransformedEventRepository repository.EventRepository // Holds transformed watched event log data
-
- // Pre-processing interfaces
- Parser parser.Parser // Parses events and methods out of contract abi fetched using contract address
- Retriever retriever.BlockRetriever // Retrieves first block for contract and current block height
-
- // Processing interfaces
- Converter converter.ConverterInterface // Converts watched event logs into custom log
- Poller poller.Poller // Polls methods using contract's token holder addresses and persists them using method datastore
-
- // Store contract configuration information
- Config config.ContractConfig
-
- // Store contract info as mapping to contract address
- Contracts map[string]*contract.Contract
-
- // Latest block in the block repository
- LastBlock int64
-}
-
-// NewTransformer takes in contract config, blockchain, and database, and returns a new Transformer
-func NewTransformer(con config.ContractConfig, BC core.BlockChain, DB *postgres.DB) *Transformer {
- return &Transformer{
- Poller: poller.NewPoller(BC, DB, types.FullSync),
- Parser: parser.NewParser(con.Network),
- Retriever: retriever.NewBlockRetriever(DB),
- Converter: &converter.Converter{},
- Contracts: map[string]*contract.Contract{},
- WatchedEventRepository: repositories.WatchedEventRepository{DB: DB},
- FilterRepository: repositories.FilterRepository{DB: DB},
- TransformedEventRepository: repository.NewEventRepository(DB, types.FullSync),
- Config: con,
- }
-}
-
-// Init initializes the transformer
-// Use after creating and setting transformer
-// Loops over all of the addr => filter sets
-// Uses parser to pull event info from abi
-// Use this info to generate event filters
-func (tr *Transformer) Init() error {
- for contractAddr := range tr.Config.Addresses {
- // Configure Abi
- if tr.Config.Abis[contractAddr] == "" {
- // If no abi is given in the config, this method will try fetching from internal look-up table and etherscan
- err := tr.Parser.Parse(contractAddr)
- if err != nil {
- return err
- }
- } else {
- // If we have an abi from the config, load that into the parser
- err := tr.Parser.ParseAbiStr(tr.Config.Abis[contractAddr])
- if err != nil {
- return err
- }
- }
-
- // Get first block and most recent block number in the header repo
- firstBlock, err := tr.Retriever.RetrieveFirstBlock(contractAddr)
- if err != nil {
- return err
- }
- // Set to specified range if it falls within the bounds
- if firstBlock < tr.Config.StartingBlocks[contractAddr] {
- firstBlock = tr.Config.StartingBlocks[contractAddr]
- }
-
- // Get contract name if it has one
- var name = new(string)
- pollingErr := tr.Poller.FetchContractData(tr.Parser.Abi(), contractAddr, "name", nil, name, tr.LastBlock)
- if pollingErr != nil {
- // can't return this error because "name" might not exist on the contract
- logrus.Warnf("error fetching contract data: %s", pollingErr.Error())
- }
-
- // Remove any potential accidental duplicate inputs in arg filter values
- eventArgs := map[string]bool{}
- for _, arg := range tr.Config.EventArgs[contractAddr] {
- eventArgs[arg] = true
- }
- methodArgs := map[string]bool{}
- for _, arg := range tr.Config.MethodArgs[contractAddr] {
- methodArgs[arg] = true
- }
-
- // Aggregate info into contract object
- info := contract.Contract{
- Name: *name,
- Network: tr.Config.Network,
- Address: contractAddr,
- Abi: tr.Parser.Abi(),
- ParsedAbi: tr.Parser.ParsedAbi(),
- StartingBlock: firstBlock,
- Events: tr.Parser.GetEvents(tr.Config.Events[contractAddr]),
- Methods: tr.Parser.GetSelectMethods(tr.Config.Methods[contractAddr]),
- FilterArgs: eventArgs,
- MethodArgs: methodArgs,
- Piping: tr.Config.Piping[contractAddr],
- }.Init()
-
- // Use info to create filters
- err = info.GenerateFilters()
- if err != nil {
- return err
- }
-
- // Iterate over filters and push them to the repo using filter repository interface
- for _, filter := range info.Filters {
- err = tr.FilterRepository.CreateFilter(filter)
- if err != nil {
- return err
- }
- }
-
- // Store contract info for further processing
- tr.Contracts[contractAddr] = info
- }
-
- // Get the most recent block number in the block repo
- var err error
- tr.LastBlock, err = tr.Retriever.RetrieveMostRecentBlock()
- if err != nil {
- return err
- }
-
- return nil
-}
-
-// Execute runs the transformation processes
-// Iterates through stored, initialized contract objects
-// Iterates through contract's event filters, grabbing watched event logs
-// Uses converter to convert logs into custom log type
-// Persists converted logs into custom postgres tables
-// Calls selected methods, using token holder address generated during event log conversion
-func (tr *Transformer) Execute() error {
- if len(tr.Contracts) == 0 {
- return errors.New("error: transformer has no initialized contracts to work with")
- }
- // Iterate through all internal contracts
- for _, con := range tr.Contracts {
- // Update converter with current contract
- tr.Converter.Update(con)
-
- // Iterate through contract filters and get watched event logs
- for eventSig, filter := range con.Filters {
- watchedEvents, err := tr.WatchedEventRepository.GetWatchedEvents(filter.Name)
- if err != nil {
- return err
- }
-
- // Iterate over watched event logs
- for _, we := range watchedEvents {
- // Convert them to our custom log type
- cstm, err := tr.Converter.Convert(*we, con.Events[eventSig])
- if err != nil {
- return err
- }
- if cstm == nil {
- continue
- }
-
- // If log is not empty, immediately persist in repo
- // Run this in seperate goroutine?
- err = tr.TransformedEventRepository.PersistLogs([]types.Log{*cstm}, con.Events[eventSig], con.Address, con.Name)
- if err != nil {
- return err
- }
- }
- }
-
- // After persisting all watched event logs
- // poller polls select contract methods
- // and persists the results into custom pg tables
- if err := tr.Poller.PollContract(*con, tr.LastBlock); err != nil {
- return err
- }
- }
-
- // At the end of a transformation cycle, and before the next
- // update the latest block from the block repo
- var err error
- tr.LastBlock, err = tr.Retriever.RetrieveMostRecentBlock()
- if err != nil {
- return err
- }
-
- return nil
-}
-
-// GetConfig returns the transformers config; satisfies the transformer interface
-func (tr *Transformer) GetConfig() config.ContractConfig {
- return tr.Config
-}
diff --git a/pkg/eth/contract_watcher/full/transformer/transformer_suite_test.go b/pkg/eth/contract_watcher/full/transformer/transformer_suite_test.go
deleted file mode 100644
index aac31e85..00000000
--- a/pkg/eth/contract_watcher/full/transformer/transformer_suite_test.go
+++ /dev/null
@@ -1,35 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package transformer_test
-
-import (
- "io/ioutil"
- "testing"
-
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
- "github.com/sirupsen/logrus"
-)
-
-func TestTransformer(t *testing.T) {
- RegisterFailHandler(Fail)
- RunSpecs(t, "Full Transformer Suite Test")
-}
-
-var _ = BeforeSuite(func() {
- logrus.SetOutput(ioutil.Discard)
-})
diff --git a/pkg/eth/contract_watcher/full/transformer/transformer_test.go b/pkg/eth/contract_watcher/full/transformer/transformer_test.go
deleted file mode 100644
index ea5104b4..00000000
--- a/pkg/eth/contract_watcher/full/transformer/transformer_test.go
+++ /dev/null
@@ -1,98 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package transformer_test
-
-import (
- "math/rand"
- "time"
-
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
-
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/full/retriever"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/full/transformer"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/parser"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/poller"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types"
- "github.com/vulcanize/vulcanizedb/pkg/eth/fakes"
-)
-
-var _ = Describe("Transformer", func() {
- var fakeAddress = "0x1234567890abcdef"
- rand.Seed(time.Now().UnixNano())
-
- Describe("Init", func() {
- It("Initializes transformer's contract objects", func() {
- blockRetriever := &fakes.MockFullSyncBlockRetriever{}
- firstBlock := int64(1)
- mostRecentBlock := int64(2)
- blockRetriever.FirstBlock = firstBlock
- blockRetriever.MostRecentBlock = mostRecentBlock
-
- parsr := &fakes.MockParser{}
- fakeAbi := "fake_abi"
- eventName := "Transfer"
- event := types.Event{}
- parsr.AbiToReturn = fakeAbi
- parsr.EventName = eventName
- parsr.Event = event
-
- pollr := &fakes.MockPoller{}
- fakeContractName := "fake_contract_name"
- pollr.ContractName = fakeContractName
-
- t := getTransformer(blockRetriever, parsr, pollr)
-
- err := t.Init()
-
- Expect(err).ToNot(HaveOccurred())
-
- c, ok := t.Contracts[fakeAddress]
- Expect(ok).To(Equal(true))
-
- Expect(c.StartingBlock).To(Equal(firstBlock))
- Expect(t.LastBlock).To(Equal(mostRecentBlock))
- Expect(c.Abi).To(Equal(fakeAbi))
- Expect(c.Name).To(Equal(fakeContractName))
- Expect(c.Address).To(Equal(fakeAddress))
- })
-
- It("Fails to initialize if first and most recent blocks cannot be fetched from vDB", func() {
- blockRetriever := &fakes.MockFullSyncBlockRetriever{}
- blockRetriever.FirstBlockErr = fakes.FakeError
- t := getTransformer(blockRetriever, &fakes.MockParser{}, &fakes.MockPoller{})
-
- err := t.Init()
-
- Expect(err).To(HaveOccurred())
- Expect(err).To(MatchError(fakes.FakeError))
- })
- })
-})
-
-func getTransformer(blockRetriever retriever.BlockRetriever, parsr parser.Parser, pollr poller.Poller) transformer.Transformer {
- return transformer.Transformer{
- FilterRepository: &fakes.MockFilterRepository{},
- Parser: parsr,
- Retriever: blockRetriever,
- Poller: pollr,
- Contracts: map[string]*contract.Contract{},
- Config: mocks.MockConfig,
- }
-}
diff --git a/pkg/eth/contract_watcher/header/repository/header_repository.go b/pkg/eth/contract_watcher/header/repository/header_repository.go
index 0330f6fc..1be851de 100644
--- a/pkg/eth/contract_watcher/header/repository/header_repository.go
+++ b/pkg/eth/contract_watcher/header/repository/header_repository.go
@@ -23,7 +23,7 @@ import (
"github.com/sirupsen/logrus"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
const columnCacheSize = 1000
diff --git a/pkg/eth/contract_watcher/header/repository/header_repository_test.go b/pkg/eth/contract_watcher/header/repository/header_repository_test.go
index 7c47a8c1..de517ef3 100644
--- a/pkg/eth/contract_watcher/header/repository/header_repository_test.go
+++ b/pkg/eth/contract_watcher/header/repository/header_repository_test.go
@@ -26,8 +26,8 @@ import (
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers"
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
var _ = Describe("Repository", func() {
diff --git a/pkg/eth/contract_watcher/header/retriever/block_retriever.go b/pkg/eth/contract_watcher/header/retriever/block_retriever.go
index e2350b3d..50d218bc 100644
--- a/pkg/eth/contract_watcher/header/retriever/block_retriever.go
+++ b/pkg/eth/contract_watcher/header/retriever/block_retriever.go
@@ -17,7 +17,7 @@
package retriever
import (
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
// BlockRetriever is used to retrieve the first block for a given contract and the most recent block
diff --git a/pkg/eth/contract_watcher/header/retriever/block_retriever_test.go b/pkg/eth/contract_watcher/header/retriever/block_retriever_test.go
index 0406d8e5..a9891100 100644
--- a/pkg/eth/contract_watcher/header/retriever/block_retriever_test.go
+++ b/pkg/eth/contract_watcher/header/retriever/block_retriever_test.go
@@ -23,8 +23,8 @@ import (
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/header/retriever"
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers"
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
var _ = Describe("Block Retriever", func() {
diff --git a/pkg/eth/contract_watcher/header/transformer/transformer.go b/pkg/eth/contract_watcher/header/transformer/transformer.go
index bf18474d..7b9c9d2a 100644
--- a/pkg/eth/contract_watcher/header/transformer/transformer.go
+++ b/pkg/eth/contract_watcher/header/transformer/transformer.go
@@ -38,7 +38,7 @@ import (
srep "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/repository"
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
// Transformer is the top level struct for transforming watched contract data
diff --git a/pkg/eth/contract_watcher/shared/helpers/test_helpers/database.go b/pkg/eth/contract_watcher/shared/helpers/test_helpers/database.go
index 0ec368b1..21dce73e 100644
--- a/pkg/eth/contract_watcher/shared/helpers/test_helpers/database.go
+++ b/pkg/eth/contract_watcher/shared/helpers/test_helpers/database.go
@@ -17,8 +17,6 @@
package test_helpers
import (
- "math/rand"
-
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethereum/go-ethereum/rpc"
. "github.com/onsi/gomega"
@@ -31,9 +29,8 @@ import (
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks"
rpc2 "github.com/vulcanize/vulcanizedb/pkg/eth/converters/rpc"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
"github.com/vulcanize/vulcanizedb/pkg/eth/node"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/test_config"
)
@@ -129,34 +126,6 @@ func SetupDBandBC() (*postgres.DB, core.BlockChain) {
return db, blockChain
}
-func SetupTusdRepo(vulcanizeLogID *int64, wantedEvents, wantedMethods []string) (*postgres.DB, *contract.Contract) {
- db, err := postgres.NewDB(config.Database{
- Hostname: "localhost",
- Name: "vulcanize_testing",
- Port: 5432,
- }, core.Node{})
- Expect(err).NotTo(HaveOccurred())
-
- receiptRepository := repositories.FullSyncReceiptRepository{DB: db}
- logRepository := repositories.FullSyncLogRepository{DB: db}
- blockRepository := *repositories.NewBlockRepository(db)
-
- blockNumber := rand.Int63()
- blockID := CreateBlock(blockNumber, blockRepository)
-
- receipts := []core.Receipt{{Logs: []core.FullSyncLog{{}}}}
-
- err = receiptRepository.CreateReceiptsAndLogs(blockID, receipts)
- Expect(err).ToNot(HaveOccurred())
-
- err = logRepository.Get(vulcanizeLogID, `SELECT id FROM full_sync_logs`)
- Expect(err).ToNot(HaveOccurred())
-
- info := SetupTusdContract(wantedEvents, wantedMethods)
-
- return db, info
-}
-
func SetupTusdContract(wantedEvents, wantedMethods []string) *contract.Contract {
p := mocks.NewParser(constants.TusdAbiString)
err := p.Parse(constants.TusdContractAddress)
@@ -237,27 +206,12 @@ func TearDown(db *postgres.DB) {
_, err = tx.Exec(`DELETE FROM addresses`)
Expect(err).NotTo(HaveOccurred())
- _, err = tx.Exec(`DELETE FROM eth_blocks`)
- Expect(err).NotTo(HaveOccurred())
-
_, err = tx.Exec(`DELETE FROM headers`)
Expect(err).NotTo(HaveOccurred())
- _, err = tx.Exec(`DELETE FROM full_sync_logs`)
- Expect(err).NotTo(HaveOccurred())
-
- _, err = tx.Exec(`DELETE FROM log_filters`)
- Expect(err).NotTo(HaveOccurred())
-
- _, err = tx.Exec(`DELETE FROM full_sync_transactions`)
- Expect(err).NotTo(HaveOccurred())
-
_, err = tx.Exec("DELETE FROM header_sync_transactions")
Expect(err).NotTo(HaveOccurred())
- _, err = tx.Exec(`DELETE FROM full_sync_receipts`)
- Expect(err).NotTo(HaveOccurred())
-
_, err = tx.Exec(`DELETE FROM header_sync_receipts`)
Expect(err).NotTo(HaveOccurred())
@@ -287,10 +241,3 @@ func TearDown(db *postgres.DB) {
_, err = db.Exec(`VACUUM checked_headers`)
Expect(err).NotTo(HaveOccurred())
}
-
-func CreateBlock(blockNumber int64, repository repositories.BlockRepository) int64 {
- blockID, err := repository.CreateOrUpdateBlock(core.Block{Number: blockNumber})
- Expect(err).NotTo(HaveOccurred())
-
- return blockID
-}
diff --git a/pkg/eth/contract_watcher/shared/poller/poller.go b/pkg/eth/contract_watcher/shared/poller/poller.go
index 9dc17b81..e95157c9 100644
--- a/pkg/eth/contract_watcher/shared/poller/poller.go
+++ b/pkg/eth/contract_watcher/shared/poller/poller.go
@@ -30,7 +30,7 @@ import (
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/repository"
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
// Poller is the interface for polling public contract methods
diff --git a/pkg/eth/contract_watcher/shared/repository/event_repository.go b/pkg/eth/contract_watcher/shared/repository/event_repository.go
index bd67bbac..3653ca93 100644
--- a/pkg/eth/contract_watcher/shared/repository/event_repository.go
+++ b/pkg/eth/contract_watcher/shared/repository/event_repository.go
@@ -25,7 +25,7 @@ import (
"github.com/sirupsen/logrus"
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
const (
diff --git a/pkg/eth/contract_watcher/shared/repository/event_repository_test.go b/pkg/eth/contract_watcher/shared/repository/event_repository_test.go
deleted file mode 100644
index 4f5ff1b4..00000000
--- a/pkg/eth/contract_watcher/shared/repository/event_repository_test.go
+++ /dev/null
@@ -1,364 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package repository_test
-
-import (
- "encoding/json"
- "fmt"
- "strings"
-
- "github.com/ethereum/go-ethereum/common"
- geth "github.com/ethereum/go-ethereum/core/types"
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
-
- fc "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/full/converter"
- lc "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/header/converter"
- lr "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/header/repository"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/constants"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks"
- sr "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/repository"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
-)
-
-var _ = Describe("Repository", func() {
- var db *postgres.DB
- var dataStore sr.EventRepository
- var err error
- var log *types.Log
- var logs []types.Log
- var con *contract.Contract
- var vulcanizeLogId int64
- var wantedEvents = []string{"Transfer"}
- var wantedMethods = []string{"balanceOf"}
- var event types.Event
- var headerID int64
- var mockEvent = mocks.MockTranferEvent
- var mockLog1 = mocks.MockTransferLog1
- var mockLog2 = mocks.MockTransferLog2
-
- BeforeEach(func() {
- db, con = test_helpers.SetupTusdRepo(&vulcanizeLogId, wantedEvents, wantedMethods)
- mockEvent.LogID = vulcanizeLogId
-
- event = con.Events["Transfer"]
- err = con.GenerateFilters()
- Expect(err).ToNot(HaveOccurred())
- })
-
- AfterEach(func() {
- test_helpers.TearDown(db)
- })
-
- Describe("Full sync mode", func() {
- BeforeEach(func() {
- dataStore = sr.NewEventRepository(db, types.FullSync)
- })
-
- Describe("CreateContractSchema", func() {
- It("Creates schema if it doesn't exist", func() {
- created, err := dataStore.CreateContractSchema(con.Address)
- Expect(err).ToNot(HaveOccurred())
- Expect(created).To(Equal(true))
-
- created, err = dataStore.CreateContractSchema(con.Address)
- Expect(err).ToNot(HaveOccurred())
- Expect(created).To(Equal(false))
- })
-
- It("Caches schema it creates so that it does not need to repeatedly query the database to check for it's existence", func() {
- _, ok := dataStore.CheckSchemaCache(con.Address)
- Expect(ok).To(Equal(false))
-
- created, err := dataStore.CreateContractSchema(con.Address)
- Expect(err).ToNot(HaveOccurred())
- Expect(created).To(Equal(true))
-
- v, ok := dataStore.CheckSchemaCache(con.Address)
- Expect(ok).To(Equal(true))
- Expect(v).To(Equal(true))
- })
- })
-
- Describe("CreateEventTable", func() {
- It("Creates table if it doesn't exist", func() {
- created, err := dataStore.CreateContractSchema(con.Address)
- Expect(err).ToNot(HaveOccurred())
- Expect(created).To(Equal(true))
-
- created, err = dataStore.CreateEventTable(con.Address, event)
- Expect(err).ToNot(HaveOccurred())
- Expect(created).To(Equal(true))
-
- created, err = dataStore.CreateEventTable(con.Address, event)
- Expect(err).ToNot(HaveOccurred())
- Expect(created).To(Equal(false))
- })
-
- It("Caches table it creates so that it does not need to repeatedly query the database to check for it's existence", func() {
- created, err := dataStore.CreateContractSchema(con.Address)
- Expect(err).ToNot(HaveOccurred())
- Expect(created).To(Equal(true))
-
- tableID := fmt.Sprintf("%s_%s.%s_event", types.FullSync, strings.ToLower(con.Address), strings.ToLower(event.Name))
- _, ok := dataStore.CheckTableCache(tableID)
- Expect(ok).To(Equal(false))
-
- created, err = dataStore.CreateEventTable(con.Address, event)
- Expect(err).ToNot(HaveOccurred())
- Expect(created).To(Equal(true))
-
- v, ok := dataStore.CheckTableCache(tableID)
- Expect(ok).To(Equal(true))
- Expect(v).To(Equal(true))
- })
- })
-
- Describe("PersistLogs", func() {
- BeforeEach(func() {
- c := fc.Converter{}
- c.Update(con)
- log, err = c.Convert(mockEvent, event)
- Expect(err).ToNot(HaveOccurred())
- })
-
- It("Persists contract event log values into custom tables", func() {
- err = dataStore.PersistLogs([]types.Log{*log}, event, con.Address, con.Name)
- Expect(err).ToNot(HaveOccurred())
-
- b, ok := con.EmittedAddrs[common.HexToAddress("0x000000000000000000000000000000000000Af21")]
- Expect(ok).To(Equal(true))
- Expect(b).To(Equal(true))
-
- b, ok = con.EmittedAddrs[common.HexToAddress("0x09BbBBE21a5975cAc061D82f7b843bCE061BA391")]
- Expect(ok).To(Equal(true))
- Expect(b).To(Equal(true))
-
- scanLog := test_helpers.TransferLog{}
-
- err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.transfer_event", constants.TusdContractAddress)).StructScan(&scanLog)
- Expect(err).ToNot(HaveOccurred())
- expectedLog := test_helpers.TransferLog{
- ID: 1,
- VulcanizeLogID: vulcanizeLogId,
- TokenName: "TrueUSD",
- Block: 5488076,
- Tx: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546ae",
- From: "0x000000000000000000000000000000000000Af21",
- To: "0x09BbBBE21a5975cAc061D82f7b843bCE061BA391",
- Value: "1097077688018008265106216665536940668749033598146",
- }
- Expect(scanLog).To(Equal(expectedLog))
- })
-
- It("Doesn't persist duplicate event logs", func() {
- // Try to persist the same log twice in a single call
- err = dataStore.PersistLogs([]types.Log{*log, *log}, event, con.Address, con.Name)
- Expect(err).ToNot(HaveOccurred())
-
- scanLog := test_helpers.TransferLog{}
-
- err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.transfer_event", constants.TusdContractAddress)).StructScan(&scanLog)
- Expect(err).ToNot(HaveOccurred())
- expectedLog := test_helpers.TransferLog{
- ID: 1,
- VulcanizeLogID: vulcanizeLogId,
- TokenName: "TrueUSD",
- Block: 5488076,
- Tx: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546ae",
- From: "0x000000000000000000000000000000000000Af21",
- To: "0x09BbBBE21a5975cAc061D82f7b843bCE061BA391",
- Value: "1097077688018008265106216665536940668749033598146",
- }
- Expect(scanLog).To(Equal(expectedLog))
-
- // Attempt to persist the same log again in separate call
- err = dataStore.PersistLogs([]types.Log{*log}, event, con.Address, con.Name)
- Expect(err).ToNot(HaveOccurred())
-
- // Show that no new logs were entered
- var count int
- err = db.Get(&count, fmt.Sprintf("SELECT COUNT(*) FROM full_%s.transfer_event", constants.TusdContractAddress))
- Expect(err).ToNot(HaveOccurred())
- Expect(count).To(Equal(1))
- })
-
- It("Fails with empty log", func() {
- err = dataStore.PersistLogs([]types.Log{}, event, con.Address, con.Name)
- Expect(err).To(HaveOccurred())
- })
- })
- })
-
- Describe("Header sync mode", func() {
- BeforeEach(func() {
- dataStore = sr.NewEventRepository(db, types.HeaderSync)
- })
-
- Describe("CreateContractSchema", func() {
- It("Creates schema if it doesn't exist", func() {
- created, err := dataStore.CreateContractSchema(con.Address)
- Expect(err).ToNot(HaveOccurred())
- Expect(created).To(Equal(true))
-
- created, err = dataStore.CreateContractSchema(con.Address)
- Expect(err).ToNot(HaveOccurred())
- Expect(created).To(Equal(false))
- })
-
- It("Caches schema it creates so that it does not need to repeatedly query the database to check for it's existence", func() {
- _, ok := dataStore.CheckSchemaCache(con.Address)
- Expect(ok).To(Equal(false))
-
- created, err := dataStore.CreateContractSchema(con.Address)
- Expect(err).ToNot(HaveOccurred())
- Expect(created).To(Equal(true))
-
- v, ok := dataStore.CheckSchemaCache(con.Address)
- Expect(ok).To(Equal(true))
- Expect(v).To(Equal(true))
- })
-
- It("Caches table it creates so that it does not need to repeatedly query the database to check for it's existence", func() {
- created, err := dataStore.CreateContractSchema(con.Address)
- Expect(err).ToNot(HaveOccurred())
- Expect(created).To(Equal(true))
-
- tableID := fmt.Sprintf("%s_%s.%s_event", types.HeaderSync, strings.ToLower(con.Address), strings.ToLower(event.Name))
- _, ok := dataStore.CheckTableCache(tableID)
- Expect(ok).To(Equal(false))
-
- created, err = dataStore.CreateEventTable(con.Address, event)
- Expect(err).ToNot(HaveOccurred())
- Expect(created).To(Equal(true))
-
- v, ok := dataStore.CheckTableCache(tableID)
- Expect(ok).To(Equal(true))
- Expect(v).To(Equal(true))
- })
- })
-
- Describe("CreateEventTable", func() {
- It("Creates table if it doesn't exist", func() {
- created, err := dataStore.CreateContractSchema(con.Address)
- Expect(err).ToNot(HaveOccurred())
- Expect(created).To(Equal(true))
-
- created, err = dataStore.CreateEventTable(con.Address, event)
- Expect(err).ToNot(HaveOccurred())
- Expect(created).To(Equal(true))
-
- created, err = dataStore.CreateEventTable(con.Address, event)
- Expect(err).ToNot(HaveOccurred())
- Expect(created).To(Equal(false))
- })
- })
-
- Describe("PersistLogs", func() {
- BeforeEach(func() {
- headerRepository := repositories.NewHeaderRepository(db)
- headerID, err = headerRepository.CreateOrUpdateHeader(mocks.MockHeader1)
- Expect(err).ToNot(HaveOccurred())
- c := lc.Converter{}
- c.Update(con)
- logs, err = c.Convert([]geth.Log{mockLog1, mockLog2}, event, headerID)
- Expect(err).ToNot(HaveOccurred())
- })
-
- It("Persists contract event log values into custom tables", func() {
- hr := lr.NewHeaderRepository(db)
- err = hr.AddCheckColumn(event.Name + "_" + con.Address)
- Expect(err).ToNot(HaveOccurred())
-
- err = dataStore.PersistLogs(logs, event, con.Address, con.Name)
- Expect(err).ToNot(HaveOccurred())
-
- var count int
- err = db.Get(&count, fmt.Sprintf("SELECT COUNT(*) FROM header_%s.transfer_event", constants.TusdContractAddress))
- Expect(err).ToNot(HaveOccurred())
- Expect(count).To(Equal(2))
-
- scanLog := test_helpers.HeaderSyncTransferLog{}
- err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.transfer_event LIMIT 1", constants.TusdContractAddress)).StructScan(&scanLog)
- Expect(err).ToNot(HaveOccurred())
- Expect(scanLog.HeaderID).To(Equal(headerID))
- Expect(scanLog.TokenName).To(Equal("TrueUSD"))
- Expect(scanLog.TxIndex).To(Equal(int64(110)))
- Expect(scanLog.LogIndex).To(Equal(int64(1)))
- Expect(scanLog.From).To(Equal("0x000000000000000000000000000000000000Af21"))
- Expect(scanLog.To).To(Equal("0x09BbBBE21a5975cAc061D82f7b843bCE061BA391"))
- Expect(scanLog.Value).To(Equal("1097077688018008265106216665536940668749033598146"))
-
- var expectedRawLog, rawLog geth.Log
- err = json.Unmarshal(logs[0].Raw, &expectedRawLog)
- Expect(err).ToNot(HaveOccurred())
- err = json.Unmarshal(scanLog.RawLog, &rawLog)
- Expect(err).ToNot(HaveOccurred())
- Expect(rawLog).To(Equal(expectedRawLog))
- })
-
- It("Doesn't persist duplicate event logs", func() {
- hr := lr.NewHeaderRepository(db)
- err = hr.AddCheckColumn(event.Name + "_" + con.Address)
- Expect(err).ToNot(HaveOccurred())
-
- // Successfully persist the two unique logs
- err = dataStore.PersistLogs(logs, event, con.Address, con.Name)
- Expect(err).ToNot(HaveOccurred())
-
- // Try to insert the same logs again
- err = dataStore.PersistLogs(logs, event, con.Address, con.Name)
- Expect(err).ToNot(HaveOccurred())
-
- // Show that no new logs were entered
- var count int
- err = db.Get(&count, fmt.Sprintf("SELECT COUNT(*) FROM header_%s.transfer_event", constants.TusdContractAddress))
- Expect(err).ToNot(HaveOccurred())
- Expect(count).To(Equal(2))
- })
-
- It("inserts additional log if only some are duplicate", func() {
- hr := lr.NewHeaderRepository(db)
- err = hr.AddCheckColumn(event.Name + "_" + con.Address)
- Expect(err).ToNot(HaveOccurred())
-
- // Successfully persist first log
- err = dataStore.PersistLogs([]types.Log{logs[0]}, event, con.Address, con.Name)
- Expect(err).ToNot(HaveOccurred())
-
- // Successfully persist second log even though first already persisted
- err = dataStore.PersistLogs(logs, event, con.Address, con.Name)
- Expect(err).ToNot(HaveOccurred())
-
- // Show that both logs were entered
- var count int
- err = db.Get(&count, fmt.Sprintf("SELECT COUNT(*) FROM header_%s.transfer_event", constants.TusdContractAddress))
- Expect(err).ToNot(HaveOccurred())
- Expect(count).To(Equal(2))
- })
-
- It("Fails with empty log", func() {
- err = dataStore.PersistLogs([]types.Log{}, event, con.Address, con.Name)
- Expect(err).To(HaveOccurred())
- })
- })
- })
-})
diff --git a/pkg/eth/contract_watcher/shared/repository/method_repository.go b/pkg/eth/contract_watcher/shared/repository/method_repository.go
index 3525b226..711b36e2 100644
--- a/pkg/eth/contract_watcher/shared/repository/method_repository.go
+++ b/pkg/eth/contract_watcher/shared/repository/method_repository.go
@@ -25,7 +25,7 @@ import (
"github.com/sirupsen/logrus"
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
const methodCacheSize = 1000
diff --git a/pkg/eth/contract_watcher/shared/repository/method_repository_test.go b/pkg/eth/contract_watcher/shared/repository/method_repository_test.go
index 89dea7fe..3fc43736 100644
--- a/pkg/eth/contract_watcher/shared/repository/method_repository_test.go
+++ b/pkg/eth/contract_watcher/shared/repository/method_repository_test.go
@@ -28,7 +28,7 @@ import (
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers"
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/repository"
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
var _ = Describe("Repository", func() {
diff --git a/pkg/eth/contract_watcher/shared/retriever/address_retriever.go b/pkg/eth/contract_watcher/shared/retriever/address_retriever.go
index f4f35118..f24340eb 100644
--- a/pkg/eth/contract_watcher/shared/retriever/address_retriever.go
+++ b/pkg/eth/contract_watcher/shared/retriever/address_retriever.go
@@ -25,7 +25,7 @@ import (
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract"
"github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
// AddressRetriever is used to retrieve the addresses associated with a contract
diff --git a/pkg/eth/contract_watcher/shared/retriever/address_retriever_test.go b/pkg/eth/contract_watcher/shared/retriever/address_retriever_test.go
deleted file mode 100644
index 6a83792c..00000000
--- a/pkg/eth/contract_watcher/shared/retriever/address_retriever_test.go
+++ /dev/null
@@ -1,107 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package retriever_test
-
-import (
- "github.com/ethereum/go-ethereum/common"
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
-
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/full/converter"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/constants"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/repository"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/retriever"
- "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types"
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
-)
-
-var mockEvent = core.WatchedEvent{
- Name: constants.TransferEvent.String(),
- BlockNumber: 5488076,
- Address: constants.TusdContractAddress,
- TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546ae",
- Index: 110,
- Topic0: constants.TransferEvent.Signature(),
- Topic1: "0x000000000000000000000000000000000000000000000000000000000000af21",
- Topic2: "0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391",
- Topic3: "",
- Data: "0x000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000089d24a6b4ccb1b6faa2625fe562bdd9a23260359000000000000000000000000000000000000000000000000392d2e2bda9c00000000000000000000000000000000000000000000000000927f41fa0a4a418000000000000000000000000000000000000000000000000000000000005adcfebe",
-}
-
-var _ = Describe("Address Retriever Test", func() {
- var db *postgres.DB
- var dataStore repository.EventRepository
- var info *contract.Contract
- var vulcanizeLogId int64
- var log *types.Log
- var r retriever.AddressRetriever
- var wantedEvents = []string{"Transfer"}
-
- BeforeEach(func() {
- db, info = test_helpers.SetupTusdRepo(&vulcanizeLogId, wantedEvents, []string{})
- mockEvent.LogID = vulcanizeLogId
-
- event := info.Events["Transfer"]
- filterErr := info.GenerateFilters()
- Expect(filterErr).ToNot(HaveOccurred())
-
- c := converter.Converter{}
- c.Update(info)
- var convertErr error
- log, convertErr = c.Convert(mockEvent, event)
- Expect(convertErr).ToNot(HaveOccurred())
-
- dataStore = repository.NewEventRepository(db, types.FullSync)
- persistErr := dataStore.PersistLogs([]types.Log{*log}, event, info.Address, info.Name)
- Expect(persistErr).ToNot(HaveOccurred())
-
- r = retriever.NewAddressRetriever(db, types.FullSync)
- })
-
- AfterEach(func() {
- test_helpers.TearDown(db)
- })
-
- Describe("RetrieveTokenHolderAddresses", func() {
- It("Retrieves a list of token holder addresses from persisted event logs", func() {
- addresses, retrieveErr := r.RetrieveTokenHolderAddresses(*info)
- Expect(retrieveErr).ToNot(HaveOccurred())
-
- _, ok := addresses[common.HexToAddress("0x000000000000000000000000000000000000000000000000000000000000af21")]
- Expect(ok).To(Equal(true))
-
- _, ok = addresses[common.HexToAddress("0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391")]
- Expect(ok).To(Equal(true))
-
- _, ok = addresses[common.HexToAddress("0x")]
- Expect(ok).To(Equal(false))
-
- _, ok = addresses[common.HexToAddress(constants.TusdContractAddress)]
- Expect(ok).To(Equal(false))
-
- })
-
- It("Returns empty list when empty contract info is used", func() {
- addresses, retrieveErr := r.RetrieveTokenHolderAddresses(contract.Contract{})
- Expect(retrieveErr).ToNot(HaveOccurred())
- Expect(len(addresses)).To(Equal(0))
- })
- })
-})
diff --git a/pkg/eth/contract_watcher/shared/retriever/retriever_suite_test.go b/pkg/eth/contract_watcher/shared/retriever/retriever_suite_test.go
deleted file mode 100644
index 6056bbfc..00000000
--- a/pkg/eth/contract_watcher/shared/retriever/retriever_suite_test.go
+++ /dev/null
@@ -1,35 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package retriever_test
-
-import (
- "io/ioutil"
- "testing"
-
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
- "github.com/sirupsen/logrus"
-)
-
-func TestRetriever(t *testing.T) {
- RegisterFailHandler(Fail)
- RunSpecs(t, "Address Retriever Suite Test")
-}
-
-var _ = BeforeSuite(func() {
- logrus.SetOutput(ioutil.Discard)
-})
diff --git a/pkg/eth/core/node_info.go b/pkg/eth/core/node_info.go
index db1d9de6..245bd532 100644
--- a/pkg/eth/core/node_info.go
+++ b/pkg/eth/core/node_info.go
@@ -35,7 +35,7 @@ const (
type Node struct {
GenesisBlock string
- NetworkID float64
+ NetworkID string
ID string
ClientName string
}
diff --git a/pkg/eth/datastore/postgres/repositories/block_repository.go b/pkg/eth/datastore/postgres/repositories/block_repository.go
deleted file mode 100644
index 718af605..00000000
--- a/pkg/eth/datastore/postgres/repositories/block_repository.go
+++ /dev/null
@@ -1,344 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package repositories
-
-import (
- "database/sql"
- "errors"
-
- "github.com/jmoiron/sqlx"
- "github.com/sirupsen/logrus"
-
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
-)
-
-const (
- blocksFromHeadBeforeFinal = 20
-)
-
-var ErrBlockExists = errors.New("Won't add block that already exists.")
-
-type BlockRepository struct {
- database *postgres.DB
-}
-
-func NewBlockRepository(database *postgres.DB) *BlockRepository {
- return &BlockRepository{database: database}
-}
-
-func (blockRepository BlockRepository) SetBlocksStatus(chainHead int64) error {
- cutoff := chainHead - blocksFromHeadBeforeFinal
- _, err := blockRepository.database.Exec(`
- UPDATE eth_blocks SET is_final = TRUE
- WHERE is_final = FALSE AND number < $1`,
- cutoff)
-
- return err
-}
-
-func (blockRepository BlockRepository) CreateOrUpdateBlock(block core.Block) (int64, error) {
- var err error
- var blockID int64
- retrievedBlockHash, ok := blockRepository.getBlockHash(block)
- if !ok {
- return blockRepository.insertBlock(block)
- }
- if ok && retrievedBlockHash != block.Hash {
- err = blockRepository.removeBlock(block.Number)
- if err != nil {
- return 0, err
- }
- return blockRepository.insertBlock(block)
- }
- return blockID, ErrBlockExists
-}
-
-func (blockRepository BlockRepository) MissingBlockNumbers(startingBlockNumber int64, highestBlockNumber int64, nodeID string) []int64 {
- numbers := make([]int64, 0)
- err := blockRepository.database.Select(&numbers,
- `SELECT all_block_numbers
- FROM (
- SELECT generate_series($1::INT, $2::INT) AS all_block_numbers) series
- WHERE all_block_numbers NOT IN (
- SELECT number FROM eth_blocks WHERE eth_node_fingerprint = $3
- ) `,
- startingBlockNumber,
- highestBlockNumber, nodeID)
- if err != nil {
- logrus.Error("MissingBlockNumbers: error getting blocks: ", err)
- }
- return numbers
-}
-
-func (blockRepository BlockRepository) GetBlock(blockNumber int64) (core.Block, error) {
- blockRows := blockRepository.database.QueryRowx(
- `SELECT id,
- number,
- gas_limit,
- gas_used,
- time,
- difficulty,
- hash,
- nonce,
- parent_hash,
- size,
- uncle_hash,
- is_final,
- miner,
- extra_data,
- reward,
- uncles_reward
- FROM eth_blocks
- WHERE eth_node_id = $1 AND number = $2`, blockRepository.database.NodeID, blockNumber)
- savedBlock, err := blockRepository.loadBlock(blockRows)
- if err != nil {
- switch err {
- case sql.ErrNoRows:
- return core.Block{}, datastore.ErrBlockDoesNotExist(blockNumber)
- default:
- logrus.Error("GetBlock: error loading blocks: ", err)
- return savedBlock, err
- }
- }
- return savedBlock, nil
-}
-
-func (blockRepository BlockRepository) insertBlock(block core.Block) (int64, error) {
- var blockID int64
- tx, beginErr := blockRepository.database.Beginx()
- if beginErr != nil {
- return 0, postgres.ErrBeginTransactionFailed(beginErr)
- }
- insertBlockErr := tx.QueryRow(
- `INSERT INTO eth_blocks
- (eth_node_id, number, gas_limit, gas_used, time, difficulty, hash, nonce, parent_hash, size, uncle_hash, is_final, miner, extra_data, reward, uncles_reward, eth_node_fingerprint)
- VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17)
- RETURNING id `,
- blockRepository.database.NodeID,
- block.Number,
- block.GasLimit,
- block.GasUsed,
- block.Time,
- block.Difficulty,
- block.Hash,
- block.Nonce,
- block.ParentHash,
- block.Size,
- block.UncleHash,
- block.IsFinal,
- block.Miner,
- block.ExtraData,
- nullStringToZero(block.Reward),
- nullStringToZero(block.UnclesReward),
- blockRepository.database.Node.ID).
- Scan(&blockID)
- if insertBlockErr != nil {
- rollbackErr := tx.Rollback()
- if rollbackErr != nil {
- logrus.Error("failed to rollback transaction: ", rollbackErr)
- }
- return 0, postgres.ErrDBInsertFailed(insertBlockErr)
- }
- if len(block.Uncles) > 0 {
- insertUncleErr := blockRepository.createUncles(tx, blockID, block.Hash, block.Uncles)
- if insertUncleErr != nil {
- tx.Rollback()
- return 0, postgres.ErrDBInsertFailed(insertUncleErr)
- }
- }
- if len(block.Transactions) > 0 {
- insertTxErr := blockRepository.createTransactions(tx, blockID, block.Transactions)
- if insertTxErr != nil {
- rollbackErr := tx.Rollback()
- if rollbackErr != nil {
- logrus.Warn("failed to rollback transaction: ", rollbackErr)
- }
- return 0, postgres.ErrDBInsertFailed(insertTxErr)
- }
- }
- commitErr := tx.Commit()
- if commitErr != nil {
- rollbackErr := tx.Rollback()
- if rollbackErr != nil {
- logrus.Warn("failed to rollback transaction: ", rollbackErr)
- }
- return 0, commitErr
- }
- return blockID, nil
-}
-
-func (blockRepository BlockRepository) createUncles(tx *sqlx.Tx, blockID int64, blockHash string, uncles []core.Uncle) error {
- for _, uncle := range uncles {
- err := blockRepository.createUncle(tx, blockID, uncle)
- if err != nil {
- return err
- }
- }
- return nil
-}
-
-func (blockRepository BlockRepository) createUncle(tx *sqlx.Tx, blockID int64, uncle core.Uncle) error {
- _, err := tx.Exec(
- `INSERT INTO uncles
- (hash, block_id, reward, miner, raw, block_timestamp, eth_node_id, eth_node_fingerprint)
- VALUES ($1, $2, $3, $4, $5, $6, $7::NUMERIC, $8)
- RETURNING id`,
- uncle.Hash, blockID, nullStringToZero(uncle.Reward), uncle.Miner, uncle.Raw, uncle.Timestamp, blockRepository.database.NodeID, blockRepository.database.Node.ID)
- return err
-}
-
-func (blockRepository BlockRepository) createTransactions(tx *sqlx.Tx, blockID int64, transactions []core.TransactionModel) error {
- for _, transaction := range transactions {
- err := blockRepository.createTransaction(tx, blockID, transaction)
- if err != nil {
- return err
- }
- }
- return nil
-}
-
-//Fields like value lose precision if converted to
-//int64 so convert to string instead. But nil
-//big.Int -> string = "" so convert to "0"
-func nullStringToZero(s string) string {
- if s == "" {
- return "0"
- }
- return s
-}
-
-func (blockRepository BlockRepository) createTransaction(tx *sqlx.Tx, blockID int64, transaction core.TransactionModel) error {
- _, err := tx.Exec(
- `INSERT INTO full_sync_transactions
- (block_id, gas_limit, gas_price, hash, input_data, nonce, raw, tx_from, tx_index, tx_to, "value")
- VALUES ($1, $2::NUMERIC, $3::NUMERIC, $4, $5, $6::NUMERIC, $7, $8, $9::NUMERIC, $10, $11::NUMERIC)
- RETURNING id`, blockID, transaction.GasLimit, transaction.GasPrice, transaction.Hash, transaction.Data,
- transaction.Nonce, transaction.Raw, transaction.From, transaction.TxIndex, transaction.To, nullStringToZero(transaction.Value))
- if err != nil {
- return err
- }
- if hasReceipt(transaction) {
- receiptRepo := FullSyncReceiptRepository{}
- receiptID, err := receiptRepo.CreateFullSyncReceiptInTx(blockID, transaction.Receipt, tx)
- if err != nil {
- return err
- }
- if hasLogs(transaction) {
- err = blockRepository.createLogs(tx, transaction.Receipt.Logs, receiptID)
- if err != nil {
- return err
- }
- }
- }
- return nil
-}
-
-func hasLogs(transaction core.TransactionModel) bool {
- return len(transaction.Receipt.Logs) > 0
-}
-
-func hasReceipt(transaction core.TransactionModel) bool {
- return transaction.Receipt.TxHash != ""
-}
-
-func (blockRepository BlockRepository) getBlockHash(block core.Block) (string, bool) {
- var retrievedBlockHash string
- // TODO: handle possible error
- blockRepository.database.Get(&retrievedBlockHash,
- `SELECT hash
- FROM eth_blocks
- WHERE number = $1 AND eth_node_id = $2`,
- block.Number, blockRepository.database.NodeID)
- return retrievedBlockHash, blockExists(retrievedBlockHash)
-}
-
-func (blockRepository BlockRepository) createLogs(tx *sqlx.Tx, logs []core.FullSyncLog, receiptID int64) error {
- for _, tlog := range logs {
- _, err := tx.Exec(
- `INSERT INTO full_sync_logs (block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data, receipt_id)
- VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
- `,
- tlog.BlockNumber, tlog.Address, tlog.TxHash, tlog.Index, tlog.Topics[0], tlog.Topics[1], tlog.Topics[2], tlog.Topics[3], tlog.Data, receiptID,
- )
- if err != nil {
- return postgres.ErrDBInsertFailed(err)
- }
- }
- return nil
-}
-
-func blockExists(retrievedBlockHash string) bool {
- return retrievedBlockHash != ""
-}
-
-func (blockRepository BlockRepository) removeBlock(blockNumber int64) error {
- _, err := blockRepository.database.Exec(
- `DELETE FROM eth_blocks WHERE number=$1 AND eth_node_id=$2`,
- blockNumber, blockRepository.database.NodeID)
- if err != nil {
- return postgres.ErrDBDeleteFailed(err)
- }
- return nil
-}
-
-func (blockRepository BlockRepository) loadBlock(blockRows *sqlx.Row) (core.Block, error) {
- type b struct {
- ID int
- core.Block
- }
- var block b
- err := blockRows.StructScan(&block)
- if err != nil {
- logrus.Error("loadBlock: error loading block: ", err)
- return core.Block{}, err
- }
- transactionRows, err := blockRepository.database.Queryx(`
- SELECT hash,
- gas_limit,
- gas_price,
- input_data,
- nonce,
- raw,
- tx_from,
- tx_index,
- tx_to,
- value
- FROM full_sync_transactions
- WHERE block_id = $1
- ORDER BY hash`, block.ID)
- if err != nil {
- logrus.Error("loadBlock: error fetting transactions: ", err)
- return core.Block{}, err
- }
- block.Transactions = blockRepository.LoadTransactions(transactionRows)
- return block.Block, nil
-}
-
-func (blockRepository BlockRepository) LoadTransactions(transactionRows *sqlx.Rows) []core.TransactionModel {
- var transactions []core.TransactionModel
- for transactionRows.Next() {
- var transaction core.TransactionModel
- err := transactionRows.StructScan(&transaction)
- if err != nil {
- logrus.Fatal(err)
- }
- transactions = append(transactions, transaction)
- }
- return transactions
-}
diff --git a/pkg/eth/datastore/postgres/repositories/block_repository_test.go b/pkg/eth/datastore/postgres/repositories/block_repository_test.go
deleted file mode 100644
index 975c8de7..00000000
--- a/pkg/eth/datastore/postgres/repositories/block_repository_test.go
+++ /dev/null
@@ -1,468 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package repositories_test
-
-import (
- "bytes"
- "math/big"
- "strconv"
-
- "github.com/ethereum/go-ethereum/common"
- "github.com/ethereum/go-ethereum/core/types"
- "github.com/vulcanize/vulcanizedb/pkg/eth/fakes"
-
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
- "github.com/vulcanize/vulcanizedb/test_config"
-)
-
-var _ = Describe("Saving blocks", func() {
- var db *postgres.DB
- var node core.Node
- var blockRepository datastore.BlockRepository
-
- BeforeEach(func() {
- node = core.Node{
- GenesisBlock: "GENESIS",
- NetworkID: 1,
- ID: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845",
- ClientName: "Geth/v1.7.2-stable-1db4ecdc/darwin-amd64/go1.9",
- }
- db = test_config.NewTestDB(node)
- test_config.CleanTestDB(db)
- blockRepository = repositories.NewBlockRepository(db)
-
- })
-
- It("associates blocks to a node", func() {
- block := core.Block{
- Number: 123,
- }
- _, insertErr := blockRepository.CreateOrUpdateBlock(block)
- Expect(insertErr).NotTo(HaveOccurred())
- nodeTwo := core.Node{
- GenesisBlock: "0x456",
- NetworkID: 1,
- ID: "x123456",
- ClientName: "Geth",
- }
- dbTwo := test_config.NewTestDB(nodeTwo)
- test_config.CleanTestDB(dbTwo)
- repositoryTwo := repositories.NewBlockRepository(dbTwo)
-
- _, err := repositoryTwo.GetBlock(123)
- Expect(err).To(HaveOccurred())
- })
-
- It("saves the attributes of the block", func() {
- blockNumber := int64(123)
- gasLimit := uint64(1000000)
- gasUsed := uint64(10)
- blockHash := "x123"
- blockParentHash := "x456"
- blockNonce := "0x881db2ca900682e9a9"
- miner := "x123"
- extraData := "xextraData"
- blockTime := uint64(1508981640)
- uncleHash := "x789"
- blockSize := string("1000")
- difficulty := int64(10)
- blockReward := "5132000000000000000"
- unclesReward := "3580000000000000000"
- block := core.Block{
- Reward: blockReward,
- Difficulty: difficulty,
- GasLimit: gasLimit,
- GasUsed: gasUsed,
- Hash: blockHash,
- ExtraData: extraData,
- Nonce: blockNonce,
- Miner: miner,
- Number: blockNumber,
- ParentHash: blockParentHash,
- Size: blockSize,
- Time: uint64(blockTime),
- UncleHash: uncleHash,
- UnclesReward: unclesReward,
- }
-
- _, insertErr := blockRepository.CreateOrUpdateBlock(block)
-
- Expect(insertErr).NotTo(HaveOccurred())
- savedBlock, err := blockRepository.GetBlock(blockNumber)
- Expect(err).NotTo(HaveOccurred())
- Expect(savedBlock.Reward).To(Equal(blockReward))
- Expect(savedBlock.Difficulty).To(Equal(difficulty))
- Expect(savedBlock.GasLimit).To(Equal(gasLimit))
- Expect(savedBlock.GasUsed).To(Equal(gasUsed))
- Expect(savedBlock.Hash).To(Equal(blockHash))
- Expect(savedBlock.Nonce).To(Equal(blockNonce))
- Expect(savedBlock.Miner).To(Equal(miner))
- Expect(savedBlock.ExtraData).To(Equal(extraData))
- Expect(savedBlock.Number).To(Equal(blockNumber))
- Expect(savedBlock.ParentHash).To(Equal(blockParentHash))
- Expect(savedBlock.Size).To(Equal(blockSize))
- Expect(savedBlock.Time).To(Equal(blockTime))
- Expect(savedBlock.UncleHash).To(Equal(uncleHash))
- Expect(savedBlock.UnclesReward).To(Equal(unclesReward))
- })
-
- It("does not find a block when searching for a number that does not exist", func() {
- _, err := blockRepository.GetBlock(111)
-
- Expect(err).To(HaveOccurred())
- })
-
- It("saves one transaction associated to the block", func() {
- block := core.Block{
- Number: 123,
- Transactions: []core.TransactionModel{fakes.FakeTransaction},
- }
-
- _, insertErr := blockRepository.CreateOrUpdateBlock(block)
-
- Expect(insertErr).NotTo(HaveOccurred())
- savedBlock, getErr := blockRepository.GetBlock(123)
- Expect(getErr).NotTo(HaveOccurred())
- Expect(len(savedBlock.Transactions)).To(Equal(1))
- })
-
- It("saves two transactions associated to the block", func() {
- block := core.Block{
- Number: 123,
- Transactions: []core.TransactionModel{fakes.FakeTransaction, fakes.FakeTransaction},
- }
-
- _, insertErr := blockRepository.CreateOrUpdateBlock(block)
-
- Expect(insertErr).NotTo(HaveOccurred())
- savedBlock, getErr := blockRepository.GetBlock(123)
- Expect(getErr).NotTo(HaveOccurred())
- Expect(len(savedBlock.Transactions)).To(Equal(2))
- })
-
- It("saves one uncle associated to the block", func() {
- fakeUncle := fakes.GetFakeUncle(common.BytesToHash([]byte{1, 2, 3}).String(), "100000")
- block := core.Block{
- Hash: fakes.FakeHash.String(),
- Number: 123,
- Transactions: []core.TransactionModel{fakes.FakeTransaction},
- Uncles: []core.Uncle{fakeUncle},
- UnclesReward: "156250000000000000",
- }
-
- id, insertErr := blockRepository.CreateOrUpdateBlock(block)
-
- Expect(insertErr).NotTo(HaveOccurred())
- savedBlock, getErr := blockRepository.GetBlock(123)
- Expect(getErr).NotTo(HaveOccurred())
- Expect(len(savedBlock.Transactions)).To(Equal(1))
- Expect(savedBlock.UnclesReward).To(Equal(big.NewInt(0).Div(big.NewInt(5000000000000000000), big.NewInt(32)).String()))
-
- var uncleModel core.Uncle
- err := db.Get(&uncleModel, `SELECT hash, reward, miner, raw, block_timestamp FROM uncles
- WHERE block_id = $1 AND hash = $2`, id, common.BytesToHash([]byte{1, 2, 3}).Hex())
- Expect(err).ToNot(HaveOccurred())
- Expect(uncleModel.Hash).To(Equal(fakeUncle.Hash))
- Expect(uncleModel.Reward).To(Equal(fakeUncle.Reward))
- Expect(uncleModel.Miner).To(Equal(fakeUncle.Miner))
- Expect(uncleModel.Timestamp).To(Equal(fakeUncle.Timestamp))
- })
-
- It("saves two uncles associated to the block", func() {
- fakeUncleOne := fakes.GetFakeUncle(common.BytesToHash([]byte{1, 2, 3}).String(), "100000")
- fakeUncleTwo := fakes.GetFakeUncle(common.BytesToHash([]byte{3, 2, 1}).String(), "90000")
- block := core.Block{
- Hash: fakes.FakeHash.String(),
- Number: 123,
- Transactions: []core.TransactionModel{fakes.FakeTransaction},
- Uncles: []core.Uncle{fakeUncleOne, fakeUncleTwo},
- UnclesReward: "312500000000000000",
- }
-
- id, insertErr := blockRepository.CreateOrUpdateBlock(block)
-
- Expect(insertErr).NotTo(HaveOccurred())
- savedBlock, getErr := blockRepository.GetBlock(123)
- Expect(getErr).NotTo(HaveOccurred())
- Expect(len(savedBlock.Transactions)).To(Equal(1))
- b := new(big.Int)
- b.SetString("10000000000000000000", 10)
- Expect(savedBlock.UnclesReward).To(Equal(big.NewInt(0).Div(b, big.NewInt(32)).String()))
-
- var uncleModel core.Uncle
- err := db.Get(&uncleModel, `SELECT hash, reward, miner, raw, block_timestamp FROM uncles
- WHERE block_id = $1 AND hash = $2`, id, common.BytesToHash([]byte{1, 2, 3}).Hex())
- Expect(err).ToNot(HaveOccurred())
- Expect(uncleModel.Hash).To(Equal(fakeUncleOne.Hash))
- Expect(uncleModel.Reward).To(Equal(fakeUncleOne.Reward))
- Expect(uncleModel.Miner).To(Equal(fakeUncleOne.Miner))
- Expect(uncleModel.Timestamp).To(Equal(fakeUncleOne.Timestamp))
-
- err = db.Get(&uncleModel, `SELECT hash, reward, miner, raw, block_timestamp FROM uncles
- WHERE block_id = $1 AND hash = $2`, id, common.BytesToHash([]byte{3, 2, 1}).Hex())
- Expect(err).ToNot(HaveOccurred())
- Expect(uncleModel.Hash).To(Equal(fakeUncleTwo.Hash))
- Expect(uncleModel.Reward).To(Equal(fakeUncleTwo.Reward))
- Expect(uncleModel.Miner).To(Equal(fakeUncleTwo.Miner))
- Expect(uncleModel.Timestamp).To(Equal(fakeUncleTwo.Timestamp))
- })
-
- It(`replaces blocks and transactions associated to the block
- when a more new block is in conflict (same block number + nodeid)`, func() {
- blockOne := core.Block{
- Number: 123,
- Hash: "xabc",
- Transactions: []core.TransactionModel{
- fakes.GetFakeTransaction("x123", core.Receipt{}),
- fakes.GetFakeTransaction("x345", core.Receipt{}),
- },
- }
- blockTwo := core.Block{
- Number: 123,
- Hash: "xdef",
- Transactions: []core.TransactionModel{
- fakes.GetFakeTransaction("x678", core.Receipt{}),
- fakes.GetFakeTransaction("x9ab", core.Receipt{}),
- },
- }
-
- _, insertErrOne := blockRepository.CreateOrUpdateBlock(blockOne)
- Expect(insertErrOne).NotTo(HaveOccurred())
- _, insertErrTwo := blockRepository.CreateOrUpdateBlock(blockTwo)
- Expect(insertErrTwo).NotTo(HaveOccurred())
-
- savedBlock, _ := blockRepository.GetBlock(123)
- Expect(len(savedBlock.Transactions)).To(Equal(2))
- Expect(savedBlock.Transactions[0].Hash).To(Equal("x678"))
- Expect(savedBlock.Transactions[1].Hash).To(Equal("x9ab"))
- })
-
- It(`does not replace blocks when block number is not unique
- but block number + node id is`, func() {
- blockOne := core.Block{
- Number: 123,
- Transactions: []core.TransactionModel{
- fakes.GetFakeTransaction("x123", core.Receipt{}),
- fakes.GetFakeTransaction("x345", core.Receipt{}),
- },
- }
- blockTwo := core.Block{
- Number: 123,
- Transactions: []core.TransactionModel{
- fakes.GetFakeTransaction("x678", core.Receipt{}),
- fakes.GetFakeTransaction("x9ab", core.Receipt{}),
- },
- }
- _, insertErrOne := blockRepository.CreateOrUpdateBlock(blockOne)
- Expect(insertErrOne).NotTo(HaveOccurred())
- nodeTwo := core.Node{
- GenesisBlock: "0x456",
- NetworkID: 1,
- }
- dbTwo := test_config.NewTestDB(nodeTwo)
- test_config.CleanTestDB(dbTwo)
- repositoryTwo := repositories.NewBlockRepository(dbTwo)
-
- _, insertErrTwo := blockRepository.CreateOrUpdateBlock(blockOne)
- Expect(insertErrTwo).NotTo(HaveOccurred())
- _, insertErrThree := repositoryTwo.CreateOrUpdateBlock(blockTwo)
- Expect(insertErrThree).NotTo(HaveOccurred())
- retrievedBlockOne, getErrOne := blockRepository.GetBlock(123)
- Expect(getErrOne).NotTo(HaveOccurred())
- retrievedBlockTwo, getErrTwo := repositoryTwo.GetBlock(123)
- Expect(getErrTwo).NotTo(HaveOccurred())
-
- Expect(retrievedBlockOne.Transactions[0].Hash).To(Equal("x123"))
- Expect(retrievedBlockTwo.Transactions[0].Hash).To(Equal("x678"))
- })
-
- It("returns 'block exists' error if attempting to add duplicate block", func() {
- block := core.Block{
- Number: 12345,
- Hash: "0x12345",
- }
-
- _, err := blockRepository.CreateOrUpdateBlock(block)
-
- Expect(err).NotTo(HaveOccurred())
-
- _, err = blockRepository.CreateOrUpdateBlock(block)
-
- Expect(err).To(HaveOccurred())
- Expect(err).To(MatchError(repositories.ErrBlockExists))
- })
-
- It("saves the attributes associated to a transaction", func() {
- gasLimit := uint64(5000)
- gasPrice := int64(3)
- nonce := uint64(10000)
- to := "1234567890"
- from := "0987654321"
- var value = new(big.Int)
- value.SetString("34940183920000000000", 10)
- inputData := "0xf7d8c8830000000000000000000000000000000000000000000000000000000000037788000000000000000000000000000000000000000000000000000000000003bd14"
- gethTransaction := types.NewTransaction(nonce, common.HexToAddress(to), value, gasLimit, big.NewInt(gasPrice), common.FromHex(inputData))
- var raw bytes.Buffer
- rlpErr := gethTransaction.EncodeRLP(&raw)
- Expect(rlpErr).NotTo(HaveOccurred())
- transaction := core.TransactionModel{
- Data: common.Hex2Bytes(inputData),
- From: from,
- GasLimit: gasLimit,
- GasPrice: gasPrice,
- Hash: "x1234",
- Nonce: nonce,
- Receipt: core.Receipt{},
- To: to,
- TxIndex: 2,
- Value: value.String(),
- Raw: []byte{},
- }
- block := core.Block{
- Number: 123,
- Transactions: []core.TransactionModel{transaction},
- }
-
- _, insertErr := blockRepository.CreateOrUpdateBlock(block)
- Expect(insertErr).NotTo(HaveOccurred())
-
- savedBlock, err := blockRepository.GetBlock(123)
- Expect(err).NotTo(HaveOccurred())
- Expect(len(savedBlock.Transactions)).To(Equal(1))
- savedTransaction := savedBlock.Transactions[0]
- Expect(savedTransaction).To(Equal(transaction))
- })
-
- Describe("The missing block numbers", func() {
- It("is empty the starting block number is the highest known block number", func() {
- _, insertErr := blockRepository.CreateOrUpdateBlock(core.Block{Number: 1})
-
- Expect(insertErr).NotTo(HaveOccurred())
- Expect(len(blockRepository.MissingBlockNumbers(1, 1, node.ID))).To(Equal(0))
- })
-
- It("is empty if copies of block exist from both current node and another", func() {
- _, insertErrOne := blockRepository.CreateOrUpdateBlock(core.Block{Number: 0})
- Expect(insertErrOne).NotTo(HaveOccurred())
- _, insertErrTwo := blockRepository.CreateOrUpdateBlock(core.Block{Number: 1})
- Expect(insertErrTwo).NotTo(HaveOccurred())
- nodeTwo := core.Node{
- GenesisBlock: "0x456",
- NetworkID: 1,
- }
- dbTwo, err := postgres.NewDB(test_config.DBConfig, nodeTwo)
- Expect(err).NotTo(HaveOccurred())
- repositoryTwo := repositories.NewBlockRepository(dbTwo)
- _, insertErrThree := repositoryTwo.CreateOrUpdateBlock(core.Block{Number: 0})
- Expect(insertErrThree).NotTo(HaveOccurred())
-
- missing := blockRepository.MissingBlockNumbers(0, 1, node.ID)
-
- Expect(len(missing)).To(BeZero())
- })
-
- It("is the only missing block number", func() {
- _, insertErr := blockRepository.CreateOrUpdateBlock(core.Block{Number: 2})
- Expect(insertErr).NotTo(HaveOccurred())
-
- Expect(blockRepository.MissingBlockNumbers(1, 2, node.ID)).To(Equal([]int64{1}))
- })
-
- It("is both missing block numbers", func() {
- _, insertErr := blockRepository.CreateOrUpdateBlock(core.Block{Number: 3})
- Expect(insertErr).NotTo(HaveOccurred())
-
- Expect(blockRepository.MissingBlockNumbers(1, 3, node.ID)).To(Equal([]int64{1, 2}))
- })
-
- It("goes back to the starting block number", func() {
- _, insertErr := blockRepository.CreateOrUpdateBlock(core.Block{Number: 6})
- Expect(insertErr).NotTo(HaveOccurred())
-
- Expect(blockRepository.MissingBlockNumbers(4, 6, node.ID)).To(Equal([]int64{4, 5}))
- })
-
- It("only includes missing block numbers", func() {
- _, insertErrOne := blockRepository.CreateOrUpdateBlock(core.Block{Number: 4})
- Expect(insertErrOne).NotTo(HaveOccurred())
- _, insertErrTwo := blockRepository.CreateOrUpdateBlock(core.Block{Number: 6})
- Expect(insertErrTwo).NotTo(HaveOccurred())
-
- Expect(blockRepository.MissingBlockNumbers(4, 6, node.ID)).To(Equal([]int64{5}))
- })
-
- It("includes blocks created by a different node", func() {
- _, insertErrOne := blockRepository.CreateOrUpdateBlock(core.Block{Number: 4})
- Expect(insertErrOne).NotTo(HaveOccurred())
- _, insertErrTwo := blockRepository.CreateOrUpdateBlock(core.Block{Number: 6})
- Expect(insertErrTwo).NotTo(HaveOccurred())
-
- Expect(blockRepository.MissingBlockNumbers(4, 6, "Different node id")).To(Equal([]int64{4, 5, 6}))
- })
-
- It("is a list with multiple gaps", func() {
- _, insertErrOne := blockRepository.CreateOrUpdateBlock(core.Block{Number: 4})
- Expect(insertErrOne).NotTo(HaveOccurred())
- _, insertErrTwo := blockRepository.CreateOrUpdateBlock(core.Block{Number: 5})
- Expect(insertErrTwo).NotTo(HaveOccurred())
- _, insertErrThree := blockRepository.CreateOrUpdateBlock(core.Block{Number: 8})
- Expect(insertErrThree).NotTo(HaveOccurred())
- _, insertErrFour := blockRepository.CreateOrUpdateBlock(core.Block{Number: 10})
- Expect(insertErrFour).NotTo(HaveOccurred())
-
- Expect(blockRepository.MissingBlockNumbers(3, 10, node.ID)).To(Equal([]int64{3, 6, 7, 9}))
- })
-
- It("returns empty array when lower bound exceeds upper bound", func() {
- Expect(blockRepository.MissingBlockNumbers(10000, 1, node.ID)).To(Equal([]int64{}))
- })
-
- It("only returns requested range even when other gaps exist", func() {
- _, insertErrOne := blockRepository.CreateOrUpdateBlock(core.Block{Number: 3})
- Expect(insertErrOne).NotTo(HaveOccurred())
- _, insertErrTwo := blockRepository.CreateOrUpdateBlock(core.Block{Number: 8})
- Expect(insertErrTwo).NotTo(HaveOccurred())
-
- Expect(blockRepository.MissingBlockNumbers(1, 5, node.ID)).To(Equal([]int64{1, 2, 4, 5}))
- })
- })
-
- Describe("The block status", func() {
- It("sets the status of blocks within n-20 of chain HEAD as final", func() {
- blockNumberOfChainHead := 25
- for i := 0; i < blockNumberOfChainHead; i++ {
- _, err := blockRepository.CreateOrUpdateBlock(core.Block{Number: int64(i), Hash: strconv.Itoa(i)})
- Expect(err).NotTo(HaveOccurred())
- }
-
- setErr := blockRepository.SetBlocksStatus(int64(blockNumberOfChainHead))
-
- Expect(setErr).NotTo(HaveOccurred())
- blockOne, err := blockRepository.GetBlock(1)
- Expect(err).ToNot(HaveOccurred())
- Expect(blockOne.IsFinal).To(Equal(true))
- blockTwo, err := blockRepository.GetBlock(24)
- Expect(err).ToNot(HaveOccurred())
- Expect(blockTwo.IsFinal).To(BeFalse())
- })
- })
-})
diff --git a/pkg/eth/datastore/postgres/repositories/checked_headers_repository.go b/pkg/eth/datastore/postgres/repositories/checked_headers_repository.go
index 14009c42..b18fdff6 100644
--- a/pkg/eth/datastore/postgres/repositories/checked_headers_repository.go
+++ b/pkg/eth/datastore/postgres/repositories/checked_headers_repository.go
@@ -18,7 +18,7 @@ package repositories
import (
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
const (
diff --git a/pkg/eth/datastore/postgres/repositories/checked_headers_repository_test.go b/pkg/eth/datastore/postgres/repositories/checked_headers_repository_test.go
index b14eee17..ee6f60ac 100644
--- a/pkg/eth/datastore/postgres/repositories/checked_headers_repository_test.go
+++ b/pkg/eth/datastore/postgres/repositories/checked_headers_repository_test.go
@@ -23,9 +23,9 @@ import (
. "github.com/onsi/gomega"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
"github.com/vulcanize/vulcanizedb/pkg/eth/fakes"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/test_config"
)
diff --git a/pkg/eth/datastore/postgres/repositories/checked_logs_repository.go b/pkg/eth/datastore/postgres/repositories/checked_logs_repository.go
deleted file mode 100644
index 1fecdcf0..00000000
--- a/pkg/eth/datastore/postgres/repositories/checked_logs_repository.go
+++ /dev/null
@@ -1,69 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package repositories
-
-import (
- "github.com/sirupsen/logrus"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
-)
-
-type CheckedLogsRepository struct {
- db *postgres.DB
-}
-
-func NewCheckedLogsRepository(db *postgres.DB) CheckedLogsRepository {
- return CheckedLogsRepository{db: db}
-}
-
-// Return whether a given address + topic0 has been fetched on a previous run of vDB
-func (repository CheckedLogsRepository) AlreadyWatchingLog(addresses []string, topic0 string) (bool, error) {
- for _, address := range addresses {
- var addressExists bool
- getAddressExistsErr := repository.db.Get(&addressExists, `SELECT EXISTS(SELECT 1 FROM public.watched_logs WHERE contract_address = $1)`, address)
- if getAddressExistsErr != nil {
- return false, getAddressExistsErr
- }
- if !addressExists {
- return false, nil
- }
- }
- var topicZeroExists bool
- getTopicZeroExistsErr := repository.db.Get(&topicZeroExists, `SELECT EXISTS(SELECT 1 FROM public.watched_logs WHERE topic_zero = $1)`, topic0)
- if getTopicZeroExistsErr != nil {
- return false, getTopicZeroExistsErr
- }
- return topicZeroExists, nil
-}
-
-// Persist that a given address + topic0 has is being fetched on this run of vDB
-func (repository CheckedLogsRepository) MarkLogWatched(addresses []string, topic0 string) error {
- tx, txErr := repository.db.Beginx()
- if txErr != nil {
- return txErr
- }
- for _, address := range addresses {
- _, insertErr := tx.Exec(`INSERT INTO public.watched_logs (contract_address, topic_zero) VALUES ($1, $2)`, address, topic0)
- if insertErr != nil {
- rollbackErr := tx.Rollback()
- if rollbackErr != nil {
- logrus.Errorf("error rolling back transaction inserting checked logs: %s", rollbackErr.Error())
- }
- return insertErr
- }
- }
- return tx.Commit()
-}
diff --git a/pkg/eth/datastore/postgres/repositories/checked_logs_repository_test.go b/pkg/eth/datastore/postgres/repositories/checked_logs_repository_test.go
deleted file mode 100644
index 150d5803..00000000
--- a/pkg/eth/datastore/postgres/repositories/checked_logs_repository_test.go
+++ /dev/null
@@ -1,115 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package repositories_test
-
-import (
- "github.com/ethereum/go-ethereum/common"
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
- "github.com/vulcanize/vulcanizedb/pkg/eth/fakes"
- "github.com/vulcanize/vulcanizedb/test_config"
-)
-
-var _ = Describe("Checked logs repository", func() {
- var (
- db *postgres.DB
- fakeAddress = fakes.FakeAddress.Hex()
- fakeAddresses = []string{fakeAddress}
- fakeTopicZero = fakes.FakeHash.Hex()
- repository datastore.CheckedLogsRepository
- )
-
- BeforeEach(func() {
- db = test_config.NewTestDB(test_config.NewTestNode())
- test_config.CleanTestDB(db)
- repository = repositories.NewCheckedLogsRepository(db)
- })
-
- AfterEach(func() {
- closeErr := db.Close()
- Expect(closeErr).NotTo(HaveOccurred())
- })
-
- Describe("AlreadyWatchingLog", func() {
- It("returns true if all addresses and the topic0 are already present in the db", func() {
- _, insertErr := db.Exec(`INSERT INTO public.watched_logs (contract_address, topic_zero) VALUES ($1, $2)`, fakeAddress, fakeTopicZero)
- Expect(insertErr).NotTo(HaveOccurred())
-
- hasBeenChecked, err := repository.AlreadyWatchingLog(fakeAddresses, fakeTopicZero)
-
- Expect(err).NotTo(HaveOccurred())
- Expect(hasBeenChecked).To(BeTrue())
- })
-
- It("returns true if addresses and topic0 were fetched because of a combination of other transformers", func() {
- anotherFakeAddress := common.HexToAddress("0x" + fakes.RandomString(40)).Hex()
- anotherFakeTopicZero := common.HexToHash("0x" + fakes.RandomString(64)).Hex()
- // insert row with matching address but different topic0
- _, insertOneErr := db.Exec(`INSERT INTO public.watched_logs (contract_address, topic_zero) VALUES ($1, $2)`, fakeAddress, anotherFakeTopicZero)
- Expect(insertOneErr).NotTo(HaveOccurred())
- // insert row with matching topic0 but different address
- _, insertTwoErr := db.Exec(`INSERT INTO public.watched_logs (contract_address, topic_zero) VALUES ($1, $2)`, anotherFakeAddress, fakeTopicZero)
- Expect(insertTwoErr).NotTo(HaveOccurred())
-
- hasBeenChecked, err := repository.AlreadyWatchingLog(fakeAddresses, fakeTopicZero)
-
- Expect(err).NotTo(HaveOccurred())
- Expect(hasBeenChecked).To(BeTrue())
- })
-
- It("returns false if any address has not been checked", func() {
- anotherFakeAddress := common.HexToAddress("0x" + fakes.RandomString(40)).Hex()
- _, insertErr := db.Exec(`INSERT INTO public.watched_logs (contract_address, topic_zero) VALUES ($1, $2)`, fakeAddress, fakeTopicZero)
- Expect(insertErr).NotTo(HaveOccurred())
-
- hasBeenChecked, err := repository.AlreadyWatchingLog(append(fakeAddresses, anotherFakeAddress), fakeTopicZero)
-
- Expect(err).NotTo(HaveOccurred())
- Expect(hasBeenChecked).To(BeFalse())
- })
-
- It("returns false if topic0 has not been checked", func() {
- anotherFakeTopicZero := common.HexToHash("0x" + fakes.RandomString(64)).Hex()
- _, insertErr := db.Exec(`INSERT INTO public.watched_logs (contract_address, topic_zero) VALUES ($1, $2)`, fakeAddress, anotherFakeTopicZero)
- Expect(insertErr).NotTo(HaveOccurred())
-
- hasBeenChecked, err := repository.AlreadyWatchingLog(fakeAddresses, fakeTopicZero)
-
- Expect(err).NotTo(HaveOccurred())
- Expect(hasBeenChecked).To(BeFalse())
- })
- })
-
- Describe("MarkLogWatched", func() {
- It("adds a row for all of transformer's addresses + topic0", func() {
- anotherFakeAddress := common.HexToAddress("0x" + fakes.RandomString(40)).Hex()
- err := repository.MarkLogWatched(append(fakeAddresses, anotherFakeAddress), fakeTopicZero)
-
- Expect(err).NotTo(HaveOccurred())
- var comboOneExists, comboTwoExists bool
- getComboOneErr := db.Get(&comboOneExists, `SELECT EXISTS(SELECT 1 FROM public.watched_logs WHERE contract_address = $1 AND topic_zero = $2)`, fakeAddress, fakeTopicZero)
- Expect(getComboOneErr).NotTo(HaveOccurred())
- Expect(comboOneExists).To(BeTrue())
- getComboTwoErr := db.Get(&comboTwoExists, `SELECT EXISTS(SELECT 1 FROM public.watched_logs WHERE contract_address = $1 AND topic_zero = $2)`, anotherFakeAddress, fakeTopicZero)
- Expect(getComboTwoErr).NotTo(HaveOccurred())
- Expect(comboTwoExists).To(BeTrue())
- })
- })
-})
diff --git a/pkg/eth/datastore/postgres/repositories/contract_repository.go b/pkg/eth/datastore/postgres/repositories/contract_repository.go
deleted file mode 100644
index deed9184..00000000
--- a/pkg/eth/datastore/postgres/repositories/contract_repository.go
+++ /dev/null
@@ -1,99 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package repositories
-
-import (
- "database/sql"
-
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
-)
-
-type ContractRepository struct {
- *postgres.DB
-}
-
-func (contractRepository ContractRepository) CreateContract(contract core.Contract) error {
- abi := contract.Abi
- var abiToInsert *string
- if abi != "" {
- abiToInsert = &abi
- }
- _, err := contractRepository.DB.Exec(
- `INSERT INTO watched_contracts (contract_hash, contract_abi)
- VALUES ($1, $2)
- ON CONFLICT (contract_hash)
- DO UPDATE
- SET contract_hash = $1, contract_abi = $2
- `, contract.Hash, abiToInsert)
- if err != nil {
- return postgres.ErrDBInsertFailed(err)
- }
- return nil
-}
-
-func (contractRepository ContractRepository) ContractExists(contractHash string) (bool, error) {
- var exists bool
- err := contractRepository.DB.QueryRow(
- `SELECT exists(
- SELECT 1
- FROM watched_contracts
- WHERE contract_hash = $1)`, contractHash).Scan(&exists)
- if err != nil {
- return false, err
- }
- return exists, nil
-}
-
-func (contractRepository ContractRepository) GetContract(contractHash string) (core.Contract, error) {
- var hash string
- var abi string
- contract := contractRepository.DB.QueryRow(
- `SELECT contract_hash, contract_abi FROM watched_contracts WHERE contract_hash=$1`, contractHash)
- err := contract.Scan(&hash, &abi)
- if err == sql.ErrNoRows {
- return core.Contract{}, datastore.ErrContractDoesNotExist(contractHash)
- }
- savedContract, err := contractRepository.addTransactions(core.Contract{Hash: hash, Abi: abi})
- if err != nil {
- return core.Contract{}, err
- }
- return savedContract, nil
-}
-
-func (contractRepository ContractRepository) addTransactions(contract core.Contract) (core.Contract, error) {
- transactionRows, err := contractRepository.DB.Queryx(`
- SELECT hash,
- nonce,
- tx_to,
- tx_from,
- gas_limit,
- gas_price,
- value,
- input_data
- FROM full_sync_transactions
- WHERE tx_to = $1
- ORDER BY block_id DESC`, contract.Hash)
- if err != nil {
- return core.Contract{}, err
- }
- blockRepository := &BlockRepository{contractRepository.DB}
- transactions := blockRepository.LoadTransactions(transactionRows)
- savedContract := core.Contract{Hash: contract.Hash, Transactions: transactions, Abi: contract.Abi}
- return savedContract, nil
-}
diff --git a/pkg/eth/datastore/postgres/repositories/contract_repository_test.go b/pkg/eth/datastore/postgres/repositories/contract_repository_test.go
deleted file mode 100644
index 9549c579..00000000
--- a/pkg/eth/datastore/postgres/repositories/contract_repository_test.go
+++ /dev/null
@@ -1,122 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package repositories_test
-
-import (
- "sort"
-
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
- "github.com/vulcanize/vulcanizedb/test_config"
-)
-
-var _ = Describe("Creating contracts", func() {
- var db *postgres.DB
- var contractRepository datastore.ContractRepository
- var node core.Node
-
- BeforeEach(func() {
- node = core.Node{
- GenesisBlock: "GENESIS",
- NetworkID: 1,
- ID: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845",
- ClientName: "Geth/v1.7.2-stable-1db4ecdc/darwin-amd64/go1.9",
- }
- db = test_config.NewTestDB(node)
- test_config.CleanTestDB(db)
- contractRepository = repositories.ContractRepository{DB: db}
- })
-
- It("returns the contract when it exists", func() {
- contractRepository.CreateContract(core.Contract{Hash: "x123"})
-
- contract, err := contractRepository.GetContract("x123")
- Expect(err).NotTo(HaveOccurred())
- Expect(contract.Hash).To(Equal("x123"))
-
- Expect(contractRepository.ContractExists("x123")).To(BeTrue())
- Expect(contractRepository.ContractExists("x456")).To(BeFalse())
- })
-
- It("returns err if contract does not exist", func() {
- _, err := contractRepository.GetContract("x123")
- Expect(err).To(HaveOccurred())
- })
-
- It("returns empty array when no transactions 'To' a contract", func() {
- contractRepository.CreateContract(core.Contract{Hash: "x123"})
- contract, err := contractRepository.GetContract("x123")
- Expect(err).ToNot(HaveOccurred())
- Expect(contract.Transactions).To(BeEmpty())
- })
-
- It("returns transactions 'To' a contract", func() {
- var blockRepository datastore.BlockRepository
- blockRepository = repositories.NewBlockRepository(db)
- block := core.Block{
- Number: 123,
- Transactions: []core.TransactionModel{
- {Hash: "TRANSACTION1", To: "x123", Value: "0"},
- {Hash: "TRANSACTION2", To: "x345", Value: "0"},
- {Hash: "TRANSACTION3", To: "x123", Value: "0"},
- },
- }
- _, insertBlockErr := blockRepository.CreateOrUpdateBlock(block)
- Expect(insertBlockErr).NotTo(HaveOccurred())
-
- insertContractErr := contractRepository.CreateContract(core.Contract{Hash: "x123"})
- Expect(insertContractErr).NotTo(HaveOccurred())
- contract, err := contractRepository.GetContract("x123")
- Expect(err).ToNot(HaveOccurred())
- sort.Slice(contract.Transactions, func(i, j int) bool {
- return contract.Transactions[i].Hash < contract.Transactions[j].Hash
- })
- Expect(contract.Transactions).To(
- Equal([]core.TransactionModel{
- {Data: []byte{}, Hash: "TRANSACTION1", To: "x123", Value: "0"},
- {Data: []byte{}, Hash: "TRANSACTION3", To: "x123", Value: "0"},
- }))
- })
-
- It("stores the ABI of the contract", func() {
- contractRepository.CreateContract(core.Contract{
- Abi: "{\"some\": \"json\"}",
- Hash: "x123",
- })
- contract, err := contractRepository.GetContract("x123")
- Expect(err).ToNot(HaveOccurred())
- Expect(contract.Abi).To(Equal("{\"some\": \"json\"}"))
- })
-
- It("updates the ABI of the contract if hash already present", func() {
- contractRepository.CreateContract(core.Contract{
- Abi: "{\"some\": \"json\"}",
- Hash: "x123",
- })
- contractRepository.CreateContract(core.Contract{
- Abi: "{\"some\": \"different json\"}",
- Hash: "x123",
- })
- contract, err := contractRepository.GetContract("x123")
- Expect(err).ToNot(HaveOccurred())
- Expect(contract.Abi).To(Equal("{\"some\": \"different json\"}"))
- })
-})
diff --git a/pkg/eth/datastore/postgres/repositories/full_sync_log_repository.go b/pkg/eth/datastore/postgres/repositories/full_sync_log_repository.go
deleted file mode 100644
index cd01a0b1..00000000
--- a/pkg/eth/datastore/postgres/repositories/full_sync_log_repository.go
+++ /dev/null
@@ -1,106 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package repositories
-
-import (
- "database/sql"
-
- "github.com/sirupsen/logrus"
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
-)
-
-type FullSyncLogRepository struct {
- *postgres.DB
-}
-
-func (repository FullSyncLogRepository) CreateLogs(lgs []core.FullSyncLog, receiptID int64) error {
- tx, _ := repository.DB.Beginx()
- for _, tlog := range lgs {
- _, insertLogErr := tx.Exec(
- `INSERT INTO full_sync_logs (block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data, receipt_id)
- VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
- `,
- tlog.BlockNumber, tlog.Address, tlog.TxHash, tlog.Index, tlog.Topics[0], tlog.Topics[1], tlog.Topics[2], tlog.Topics[3], tlog.Data, receiptID,
- )
- if insertLogErr != nil {
- rollbackErr := tx.Rollback()
- if rollbackErr != nil {
- logrus.Error("CreateLogs: could not perform rollback: ", rollbackErr)
- }
- return postgres.ErrDBInsertFailed(insertLogErr)
- }
- }
- err := tx.Commit()
- if err != nil {
- err = tx.Rollback()
- if err != nil {
- logrus.Error("CreateLogs: could not perform rollback: ", err)
- }
- return postgres.ErrDBInsertFailed(err)
- }
- return nil
-}
-
-func (repository FullSyncLogRepository) GetLogs(address string, blockNumber int64) ([]core.FullSyncLog, error) {
- logRows, err := repository.DB.Query(
- `SELECT block_number,
- address,
- tx_hash,
- index,
- topic0,
- topic1,
- topic2,
- topic3,
- data
- FROM full_sync_logs
- WHERE address = $1 AND block_number = $2
- ORDER BY block_number DESC`, address, blockNumber)
- if err != nil {
- return []core.FullSyncLog{}, err
- }
- return repository.loadLogs(logRows)
-}
-
-func (repository FullSyncLogRepository) loadLogs(logsRows *sql.Rows) ([]core.FullSyncLog, error) {
- var lgs []core.FullSyncLog
- for logsRows.Next() {
- var blockNumber int64
- var address string
- var txHash string
- var index int64
- var data string
- var topics core.Topics
- err := logsRows.Scan(&blockNumber, &address, &txHash, &index, &topics[0], &topics[1], &topics[2], &topics[3], &data)
- if err != nil {
- logrus.Error("loadLogs: Error scanning a row in logRows: ", err)
- return []core.FullSyncLog{}, err
- }
- lg := core.FullSyncLog{
- BlockNumber: blockNumber,
- TxHash: txHash,
- Address: address,
- Index: index,
- Data: data,
- }
- for i, topic := range topics {
- lg.Topics[i] = topic
- }
- lgs = append(lgs, lg)
- }
- return lgs, nil
-}
diff --git a/pkg/eth/datastore/postgres/repositories/full_sync_log_repository_test.go b/pkg/eth/datastore/postgres/repositories/full_sync_log_repository_test.go
deleted file mode 100644
index 99abc7cd..00000000
--- a/pkg/eth/datastore/postgres/repositories/full_sync_log_repository_test.go
+++ /dev/null
@@ -1,221 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package repositories_test
-
-import (
- "sort"
-
- "github.com/vulcanize/vulcanizedb/pkg/eth/fakes"
-
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
- "github.com/vulcanize/vulcanizedb/test_config"
-)
-
-var _ = Describe("Full sync log Repository", func() {
- Describe("Saving logs", func() {
- var db *postgres.DB
- var blockRepository datastore.BlockRepository
- var logsRepository datastore.FullSyncLogRepository
- var receiptRepository datastore.FullSyncReceiptRepository
- var node core.Node
-
- BeforeEach(func() {
- node = core.Node{
- GenesisBlock: "GENESIS",
- NetworkID: 1,
- ID: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845",
- ClientName: "Geth/v1.7.2-stable-1db4ecdc/darwin-amd64/go1.9",
- }
- db = test_config.NewTestDB(node)
- test_config.CleanTestDB(db)
- blockRepository = repositories.NewBlockRepository(db)
- logsRepository = repositories.FullSyncLogRepository{DB: db}
- receiptRepository = repositories.FullSyncReceiptRepository{DB: db}
- })
-
- It("returns the log when it exists", func() {
- blockNumber := int64(12345)
- blockId, err := blockRepository.CreateOrUpdateBlock(core.Block{Number: blockNumber})
- Expect(err).NotTo(HaveOccurred())
- tx, _ := db.Beginx()
- receiptId, err := receiptRepository.CreateFullSyncReceiptInTx(blockId, core.Receipt{}, tx)
- tx.Commit()
- Expect(err).NotTo(HaveOccurred())
- err = logsRepository.CreateLogs([]core.FullSyncLog{{
- BlockNumber: blockNumber,
- Index: 0,
- Address: "x123",
- TxHash: "x456",
- Topics: core.Topics{0: "x777", 1: "x888", 2: "x999"},
- Data: "xabc",
- }}, receiptId)
- Expect(err).NotTo(HaveOccurred())
-
- log, err := logsRepository.GetLogs("x123", blockNumber)
-
- Expect(err).NotTo(HaveOccurred())
- Expect(log).NotTo(BeNil())
- Expect(log[0].BlockNumber).To(Equal(blockNumber))
- Expect(log[0].Address).To(Equal("x123"))
- Expect(log[0].Index).To(Equal(int64(0)))
- Expect(log[0].TxHash).To(Equal("x456"))
- Expect(log[0].Topics[0]).To(Equal("x777"))
- Expect(log[0].Topics[1]).To(Equal("x888"))
- Expect(log[0].Topics[2]).To(Equal("x999"))
- Expect(log[0].Data).To(Equal("xabc"))
- })
-
- It("returns nil if log does not exist", func() {
- log, err := logsRepository.GetLogs("x123", 1)
- Expect(err).NotTo(HaveOccurred())
- Expect(log).To(BeNil())
- })
-
- It("filters to the correct block number and address", func() {
- blockNumber := int64(12345)
- blockId, err := blockRepository.CreateOrUpdateBlock(core.Block{Number: blockNumber})
- Expect(err).NotTo(HaveOccurred())
- tx, _ := db.Beginx()
- receiptId, err := receiptRepository.CreateFullSyncReceiptInTx(blockId, core.Receipt{}, tx)
- tx.Commit()
- Expect(err).NotTo(HaveOccurred())
-
- err = logsRepository.CreateLogs([]core.FullSyncLog{{
- BlockNumber: blockNumber,
- Index: 0,
- Address: "x123",
- TxHash: "x456",
- Topics: core.Topics{0: "x777", 1: "x888", 2: "x999"},
- Data: "xabc",
- }}, receiptId)
- Expect(err).NotTo(HaveOccurred())
-
- err = logsRepository.CreateLogs([]core.FullSyncLog{{
- BlockNumber: blockNumber,
- Index: 1,
- Address: "x123",
- TxHash: "x789",
- Topics: core.Topics{0: "x111", 1: "x222", 2: "x333"},
- Data: "xdef",
- }}, receiptId)
- Expect(err).NotTo(HaveOccurred())
-
- err = logsRepository.CreateLogs([]core.FullSyncLog{{
- BlockNumber: 2,
- Index: 0,
- Address: "x123",
- TxHash: "x456",
- Topics: core.Topics{0: "x777", 1: "x888", 2: "x999"},
- Data: "xabc",
- }}, receiptId)
- Expect(err).NotTo(HaveOccurred())
-
- log, err := logsRepository.GetLogs("x123", blockNumber)
- Expect(err).NotTo(HaveOccurred())
-
- type logIndex struct {
- blockNumber int64
- Index int64
- }
-
- var uniqueBlockNumbers []logIndex
- for _, log := range log {
- uniqueBlockNumbers = append(uniqueBlockNumbers,
- logIndex{log.BlockNumber, log.Index})
- }
- sort.Slice(uniqueBlockNumbers, func(i, j int) bool {
- if uniqueBlockNumbers[i].blockNumber < uniqueBlockNumbers[j].blockNumber {
- return true
- }
- if uniqueBlockNumbers[i].blockNumber > uniqueBlockNumbers[j].blockNumber {
- return false
- }
- return uniqueBlockNumbers[i].Index < uniqueBlockNumbers[j].Index
- })
-
- Expect(log).NotTo(BeNil())
- Expect(len(log)).To(Equal(2))
- Expect(uniqueBlockNumbers).To(Equal(
- []logIndex{
- {blockNumber: blockNumber, Index: 0},
- {blockNumber: blockNumber, Index: 1}},
- ))
- })
-
- It("saves the logs attached to a receipt", func() {
- logs := []core.FullSyncLog{{
- Address: "0x8a4774fe82c63484afef97ca8d89a6ea5e21f973",
- BlockNumber: 4745407,
- Data: "0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000645a68669900000000000000000000000000000000000000000000003397684ab5869b0000000000000000000000000000000000000000000000000000000000005a36053200000000000000000000000099041f808d598b782d5a3e498681c2452a31da08",
- Index: 86,
- Topics: core.Topics{
- 0: "0x5a68669900000000000000000000000000000000000000000000000000000000",
- 1: "0x000000000000000000000000d0148dad63f73ce6f1b6c607e3413dcf1ff5f030",
- 2: "0x00000000000000000000000000000000000000000000003397684ab5869b0000",
- 3: "0x000000000000000000000000000000000000000000000000000000005a360532",
- },
- TxHash: "0x002c4799161d809b23f67884eb6598c9df5894929fe1a9ead97ca175d360f547",
- }, {
- Address: "0x99041f808d598b782d5a3e498681c2452a31da08",
- BlockNumber: 4745407,
- Data: "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000418178358",
- Index: 87,
- Topics: core.Topics{
- 0: "0x1817835800000000000000000000000000000000000000000000000000000000",
- 1: "0x0000000000000000000000008a4774fe82c63484afef97ca8d89a6ea5e21f973",
- 2: "0x0000000000000000000000000000000000000000000000000000000000000000",
- 3: "0x0000000000000000000000000000000000000000000000000000000000000000",
- },
- TxHash: "0x002c4799161d809b23f67884eb6598c9df5894929fe1a9ead97ca175d360f547",
- }, {
- Address: "0x99041f808d598b782d5a3e498681c2452a31da08",
- BlockNumber: 4745407,
- Data: "0x00000000000000000000000000000000000000000000003338f64c8423af4000",
- Index: 88,
- Topics: core.Topics{
- 0: "0x296ba4ca62c6c21c95e828080cb8aec7481b71390585605300a8a76f9e95b527",
- },
- TxHash: "0x002c4799161d809b23f67884eb6598c9df5894929fe1a9ead97ca175d360f547",
- },
- }
- receipt := core.Receipt{
- ContractAddress: "",
- CumulativeGasUsed: 7481414,
- GasUsed: 60711,
- Logs: logs,
- Bloom: "0x00000800000000000000001000000000000000400000000080000000000000000000400000010000000000000000000000000000040000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000800004000000000000001000000000000000000000000000002000000480000000000000002000000000000000020000000000000000000000000000000000000000080000000000180000c00000000000000002000002000000040000000000000000000000000000010000000000020000000000000000000002000000000000000000000000400800000000000000000",
- Status: 1,
- TxHash: "0x002c4799161d809b23f67884eb6598c9df5894929fe1a9ead97ca175d360f547",
- }
- transaction := fakes.GetFakeTransaction(receipt.TxHash, receipt)
-
- block := core.Block{Transactions: []core.TransactionModel{transaction}}
- _, err := blockRepository.CreateOrUpdateBlock(block)
- Expect(err).To(Not(HaveOccurred()))
- retrievedLogs, err := logsRepository.GetLogs("0x99041f808d598b782d5a3e498681c2452a31da08", 4745407)
-
- Expect(err).NotTo(HaveOccurred())
- expected := logs[1:]
- Expect(retrievedLogs).To(ConsistOf(expected))
- })
- })
-})
diff --git a/pkg/eth/datastore/postgres/repositories/full_sync_receipt_repository.go b/pkg/eth/datastore/postgres/repositories/full_sync_receipt_repository.go
deleted file mode 100644
index c3bb7fd5..00000000
--- a/pkg/eth/datastore/postgres/repositories/full_sync_receipt_repository.go
+++ /dev/null
@@ -1,147 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package repositories
-
-import (
- "database/sql"
-
- "github.com/jmoiron/sqlx"
- "github.com/sirupsen/logrus"
- "github.com/vulcanize/vulcanizedb/libraries/shared/repository"
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
-)
-
-type FullSyncReceiptRepository struct {
- *postgres.DB
-}
-
-func (receiptRepository FullSyncReceiptRepository) CreateReceiptsAndLogs(blockID int64, receipts []core.Receipt) error {
- tx, err := receiptRepository.DB.Beginx()
- if err != nil {
- return err
- }
- for _, receipt := range receipts {
- receiptID, err := receiptRepository.CreateFullSyncReceiptInTx(blockID, receipt, tx)
- if err != nil {
- tx.Rollback()
- return err
- }
- if len(receipt.Logs) > 0 {
- err = createLogs(receipt.Logs, receiptID, tx)
- if err != nil {
- tx.Rollback()
- return err
- }
- }
- }
- tx.Commit()
- return nil
-}
-
-func createReceipt(receipt core.Receipt, blockID int64, tx *sqlx.Tx) (int64, error) {
- var receiptID int64
- err := tx.QueryRow(
- `INSERT INTO full_sync_receipts
- (contract_address, tx_hash, cumulative_gas_used, gas_used, state_root, status, block_id)
- VALUES ($1, $2, $3, $4, $5, $6, $7)
- RETURNING id`,
- receipt.ContractAddress, receipt.TxHash, receipt.CumulativeGasUsed, receipt.GasUsed, receipt.StateRoot, receipt.Status, blockID,
- ).Scan(&receiptID)
- if err != nil {
- logrus.Error("createReceipt: Error inserting: ", err)
- }
- return receiptID, err
-}
-
-func createLogs(logs []core.FullSyncLog, receiptID int64, tx *sqlx.Tx) error {
- for _, log := range logs {
- _, err := tx.Exec(
- `INSERT INTO full_sync_logs (block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data, receipt_id)
- VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
- `,
- log.BlockNumber, log.Address, log.TxHash, log.Index, log.Topics[0], log.Topics[1], log.Topics[2], log.Topics[3], log.Data, receiptID,
- )
- if err != nil {
- return err
- }
- }
- return nil
-}
-
-func (FullSyncReceiptRepository) CreateFullSyncReceiptInTx(blockID int64, receipt core.Receipt, tx *sqlx.Tx) (int64, error) {
- var receiptID int64
- addressID, getAddressErr := repository.GetOrCreateAddressInTransaction(tx, receipt.ContractAddress)
- if getAddressErr != nil {
- logrus.Error("createReceipt: Error getting address id: ", getAddressErr)
- return receiptID, getAddressErr
- }
- err := tx.QueryRow(
- `INSERT INTO full_sync_receipts
- (contract_address_id, tx_hash, cumulative_gas_used, gas_used, state_root, status, block_id)
- VALUES ($1, $2, $3, $4, $5, $6, $7)
- RETURNING id`,
- addressID, receipt.TxHash, receipt.CumulativeGasUsed, receipt.GasUsed, receipt.StateRoot, receipt.Status, blockID).Scan(&receiptID)
- if err != nil {
- tx.Rollback()
- logrus.Warning("CreateReceipt: error inserting receipt: ", err)
- return receiptID, err
- }
- return receiptID, nil
-}
-
-func (receiptRepository FullSyncReceiptRepository) GetFullSyncReceipt(txHash string) (core.Receipt, error) {
- row := receiptRepository.DB.QueryRow(
- `SELECT contract_address_id,
- tx_hash,
- cumulative_gas_used,
- gas_used,
- state_root,
- status
- FROM full_sync_receipts
- WHERE tx_hash = $1`, txHash)
- receipt, err := loadReceipt(row)
- if err != nil {
- switch err {
- case sql.ErrNoRows:
- return core.Receipt{}, datastore.ErrReceiptDoesNotExist(txHash)
- default:
- return core.Receipt{}, err
- }
- }
- return receipt, nil
-}
-
-func loadReceipt(receiptsRow *sql.Row) (core.Receipt, error) {
- var contractAddress string
- var txHash string
- var cumulativeGasUsed uint64
- var gasUsed uint64
- var stateRoot string
- var status int
-
- err := receiptsRow.Scan(&contractAddress, &txHash, &cumulativeGasUsed, &gasUsed, &stateRoot, &status)
- return core.Receipt{
- TxHash: txHash,
- ContractAddress: contractAddress,
- CumulativeGasUsed: cumulativeGasUsed,
- GasUsed: gasUsed,
- StateRoot: stateRoot,
- Status: status,
- }, err
-}
diff --git a/pkg/eth/datastore/postgres/repositories/full_sync_receipt_repository_test.go b/pkg/eth/datastore/postgres/repositories/full_sync_receipt_repository_test.go
deleted file mode 100644
index 37959080..00000000
--- a/pkg/eth/datastore/postgres/repositories/full_sync_receipt_repository_test.go
+++ /dev/null
@@ -1,161 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package repositories_test
-
-import (
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
- "github.com/vulcanize/vulcanizedb/pkg/eth/fakes"
- "github.com/vulcanize/vulcanizedb/test_config"
-)
-
-var _ = Describe("Receipt Repository", func() {
- var blockRepository datastore.BlockRepository
- var logRepository datastore.FullSyncLogRepository
- var receiptRepository datastore.FullSyncReceiptRepository
- var db *postgres.DB
- var node core.Node
- BeforeEach(func() {
- node = core.Node{
- GenesisBlock: "GENESIS",
- NetworkID: 1,
- ID: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845",
- ClientName: "Geth/v1.7.2-stable-1db4ecdc/darwin-amd64/go1.9",
- }
- db = test_config.NewTestDB(node)
- test_config.CleanTestDB(db)
- blockRepository = repositories.NewBlockRepository(db)
- logRepository = repositories.FullSyncLogRepository{DB: db}
- receiptRepository = repositories.FullSyncReceiptRepository{DB: db}
- })
-
- Describe("Saving multiple receipts", func() {
- It("persists each receipt and its logs", func() {
- blockNumber := int64(1234567)
- blockId, err := blockRepository.CreateOrUpdateBlock(core.Block{Number: blockNumber})
- Expect(err).NotTo(HaveOccurred())
- txHashOne := "0xTxHashOne"
- txHashTwo := "0xTxHashTwo"
- addressOne := "0xAddressOne"
- addressTwo := "0xAddressTwo"
- logsOne := []core.FullSyncLog{{
- Address: addressOne,
- BlockNumber: blockNumber,
- TxHash: txHashOne,
- }, {
- Address: addressOne,
- BlockNumber: blockNumber,
- TxHash: txHashOne,
- }}
- logsTwo := []core.FullSyncLog{{
- BlockNumber: blockNumber,
- TxHash: txHashTwo,
- Address: addressTwo,
- }}
- receiptOne := core.Receipt{
- Logs: logsOne,
- TxHash: txHashOne,
- }
- receiptTwo := core.Receipt{
- Logs: logsTwo,
- TxHash: txHashTwo,
- }
- receipts := []core.Receipt{receiptOne, receiptTwo}
-
- err = receiptRepository.CreateReceiptsAndLogs(blockId, receipts)
-
- Expect(err).NotTo(HaveOccurred())
-
- persistedReceiptOne, err := receiptRepository.GetFullSyncReceipt(txHashOne)
- Expect(err).NotTo(HaveOccurred())
- Expect(persistedReceiptOne).NotTo(BeNil())
- Expect(persistedReceiptOne.TxHash).To(Equal(txHashOne))
-
- persistedReceiptTwo, err := receiptRepository.GetFullSyncReceipt(txHashTwo)
- Expect(err).NotTo(HaveOccurred())
- Expect(persistedReceiptTwo).NotTo(BeNil())
- Expect(persistedReceiptTwo.TxHash).To(Equal(txHashTwo))
-
- persistedAddressOneLogs, err := logRepository.GetLogs(addressOne, blockNumber)
- Expect(err).NotTo(HaveOccurred())
- Expect(persistedAddressOneLogs).NotTo(BeNil())
- Expect(len(persistedAddressOneLogs)).To(Equal(2))
-
- persistedAddressTwoLogs, err := logRepository.GetLogs(addressTwo, blockNumber)
- Expect(err).NotTo(HaveOccurred())
- Expect(persistedAddressTwoLogs).NotTo(BeNil())
- Expect(len(persistedAddressTwoLogs)).To(Equal(1))
- })
- })
-
- Describe("Saving receipts on a block's transactions", func() {
- It("returns the receipt when it exists", func() {
- expected := core.Receipt{
- ContractAddress: "0xde0b295669a9fd93d5f28d9ec85e40f4cb697bae",
- CumulativeGasUsed: 7996119,
- GasUsed: 21000,
- Logs: []core.FullSyncLog{},
- StateRoot: "0x88abf7e73128227370aa7baa3dd4e18d0af70e92ef1f9ef426942fbe2dddb733",
- Status: 1,
- TxHash: "0xe340558980f89d5f86045ac11e5cc34e4bcec20f9f1e2a427aa39d87114e8223",
- }
-
- transaction := fakes.GetFakeTransaction(expected.TxHash, expected)
- block := core.Block{Transactions: []core.TransactionModel{transaction}}
-
- _, err := blockRepository.CreateOrUpdateBlock(block)
-
- Expect(err).NotTo(HaveOccurred())
- receipt, err := receiptRepository.GetFullSyncReceipt("0xe340558980f89d5f86045ac11e5cc34e4bcec20f9f1e2a427aa39d87114e8223")
- Expect(err).ToNot(HaveOccurred())
- //Not currently serializing bloom logs
- Expect(receipt.Bloom).To(Equal(core.Receipt{}.Bloom))
- Expect(receipt.TxHash).To(Equal(expected.TxHash))
- Expect(receipt.CumulativeGasUsed).To(Equal(expected.CumulativeGasUsed))
- Expect(receipt.GasUsed).To(Equal(expected.GasUsed))
- Expect(receipt.StateRoot).To(Equal(expected.StateRoot))
- Expect(receipt.Status).To(Equal(expected.Status))
- })
-
- It("returns ErrReceiptDoesNotExist when receipt does not exist", func() {
- receipt, err := receiptRepository.GetFullSyncReceipt("DOES NOT EXIST")
- Expect(err).To(HaveOccurred())
- Expect(receipt).To(BeZero())
- })
-
- It("still saves receipts without logs", func() {
- receipt := core.Receipt{
- TxHash: "0x002c4799161d809b23f67884eb6598c9df5894929fe1a9ead97ca175d360f547",
- }
- transaction := fakes.GetFakeTransaction(receipt.TxHash, receipt)
-
- block := core.Block{
- Transactions: []core.TransactionModel{transaction},
- }
-
- _, err := blockRepository.CreateOrUpdateBlock(block)
-
- Expect(err).NotTo(HaveOccurred())
- _, err = receiptRepository.GetFullSyncReceipt(receipt.TxHash)
- Expect(err).To(Not(HaveOccurred()))
- })
- })
-})
diff --git a/pkg/eth/datastore/postgres/repositories/header_repository.go b/pkg/eth/datastore/postgres/repositories/header_repository.go
index 89e63cf0..d740086c 100644
--- a/pkg/eth/datastore/postgres/repositories/header_repository.go
+++ b/pkg/eth/datastore/postgres/repositories/header_repository.go
@@ -24,7 +24,7 @@ import (
log "github.com/sirupsen/logrus"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
var ErrValidHeaderExists = errors.New("valid header already exists")
@@ -134,7 +134,7 @@ func (repository HeaderRepository) getHeaderHash(header core.Header) (string, er
func (repository HeaderRepository) InternalInsertHeader(header core.Header) (int64, error) {
var headerID int64
row := repository.database.QueryRowx(
- `INSERT INTO public.headers (block_number, hash, block_timestamp, raw, eth_node_id, eth_node_fingerprint)
+ `INSERT INTO public.headers (block_number, hash, block_timestamp, raw, node_id, eth_node_fingerprint)
VALUES ($1, $2, $3::NUMERIC, $4, $5, $6) ON CONFLICT DO NOTHING RETURNING id`,
header.BlockNumber, header.Hash, header.Timestamp, header.Raw, repository.database.NodeID, repository.database.Node.ID)
err := row.Scan(&headerID)
diff --git a/pkg/eth/datastore/postgres/repositories/header_repository_test.go b/pkg/eth/datastore/postgres/repositories/header_repository_test.go
index e966bffe..fc47e20d 100644
--- a/pkg/eth/datastore/postgres/repositories/header_repository_test.go
+++ b/pkg/eth/datastore/postgres/repositories/header_repository_test.go
@@ -27,8 +27,8 @@ import (
. "github.com/onsi/gomega"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/test_config"
)
@@ -75,7 +75,7 @@ var _ = Describe("Block header repository", func() {
_, err = repo.CreateOrUpdateHeader(header)
Expect(err).NotTo(HaveOccurred())
var ethNodeId int64
- err = db.Get(ðNodeId, `SELECT eth_node_id FROM public.headers WHERE block_number = $1`, header.BlockNumber)
+ err = db.Get(ðNodeId, `SELECT node_id FROM public.headers WHERE block_number = $1`, header.BlockNumber)
Expect(err).NotTo(HaveOccurred())
Expect(ethNodeId).To(Equal(db.NodeID))
var ethNodeFingerprint string
diff --git a/pkg/eth/datastore/postgres/repositories/header_sync_log_repository.go b/pkg/eth/datastore/postgres/repositories/header_sync_log_repository.go
index f5459820..7b5e162c 100644
--- a/pkg/eth/datastore/postgres/repositories/header_sync_log_repository.go
+++ b/pkg/eth/datastore/postgres/repositories/header_sync_log_repository.go
@@ -24,7 +24,7 @@ import (
"github.com/sirupsen/logrus"
"github.com/vulcanize/vulcanizedb/libraries/shared/repository"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
const insertHeaderSyncLogQuery = `INSERT INTO header_sync_logs
diff --git a/pkg/eth/datastore/postgres/repositories/header_sync_log_repository_test.go b/pkg/eth/datastore/postgres/repositories/header_sync_log_repository_test.go
index 70790a0e..7c19fb6b 100644
--- a/pkg/eth/datastore/postgres/repositories/header_sync_log_repository_test.go
+++ b/pkg/eth/datastore/postgres/repositories/header_sync_log_repository_test.go
@@ -25,9 +25,9 @@ import (
repository2 "github.com/vulcanize/vulcanizedb/libraries/shared/repository"
"github.com/vulcanize/vulcanizedb/libraries/shared/test_data"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
"github.com/vulcanize/vulcanizedb/pkg/eth/fakes"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/test_config"
)
diff --git a/pkg/eth/datastore/postgres/repositories/header_sync_receipt_repository_test.go b/pkg/eth/datastore/postgres/repositories/header_sync_receipt_repository_test.go
index 9383391f..c7494a9a 100644
--- a/pkg/eth/datastore/postgres/repositories/header_sync_receipt_repository_test.go
+++ b/pkg/eth/datastore/postgres/repositories/header_sync_receipt_repository_test.go
@@ -26,8 +26,8 @@ import (
. "github.com/onsi/gomega"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/test_config"
)
diff --git a/pkg/eth/datastore/postgres/repositories/log_filter_repository.go b/pkg/eth/datastore/postgres/repositories/log_filter_repository.go
deleted file mode 100644
index 094c142c..00000000
--- a/pkg/eth/datastore/postgres/repositories/log_filter_repository.go
+++ /dev/null
@@ -1,92 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package repositories
-
-import (
- "database/sql"
-
- "encoding/json"
- "errors"
-
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
- "github.com/vulcanize/vulcanizedb/pkg/eth/filters"
-)
-
-type FilterRepository struct {
- *postgres.DB
-}
-
-func (filterRepository FilterRepository) CreateFilter(query filters.LogFilter) error {
- _, err := filterRepository.DB.Exec(
- `INSERT INTO log_filters
- (name, from_block, to_block, address, topic0, topic1, topic2, topic3)
- VALUES ($1, NULLIF($2, -1), NULLIF($3, -1), $4, NULLIF($5, ''), NULLIF($6, ''), NULLIF($7, ''), NULLIF($8, ''))`,
- query.Name, query.FromBlock, query.ToBlock, query.Address, query.Topics[0], query.Topics[1], query.Topics[2], query.Topics[3])
- if err != nil {
- return err
- }
- return nil
-}
-
-func (filterRepository FilterRepository) GetFilter(name string) (filters.LogFilter, error) {
- lf := DBLogFilter{}
- err := filterRepository.DB.Get(&lf,
- `SELECT
- id,
- name,
- from_block,
- to_block,
- address,
- json_build_array(topic0, topic1, topic2, topic3) AS topics
- FROM log_filters
- WHERE name = $1`, name)
- if err != nil {
- switch err {
- case sql.ErrNoRows:
- return filters.LogFilter{}, datastore.ErrFilterDoesNotExist(name)
- default:
- return filters.LogFilter{}, err
- }
- }
- dbLogFilterToCoreLogFilter(lf)
- return *lf.LogFilter, nil
-}
-
-type DBTopics []*string
-
-func (t *DBTopics) Scan(src interface{}) error {
- asBytes, ok := src.([]byte)
- if !ok {
- return error(errors.New("scan source was not []byte"))
- }
- return json.Unmarshal(asBytes, &t)
-}
-
-type DBLogFilter struct {
- ID int
- *filters.LogFilter
- Topics DBTopics
-}
-
-func dbLogFilterToCoreLogFilter(lf DBLogFilter) {
- for i, v := range lf.Topics {
- if v != nil {
- lf.LogFilter.Topics[i] = *v
- }
- }
-}
diff --git a/pkg/eth/datastore/postgres/repositories/log_filter_repository_test.go b/pkg/eth/datastore/postgres/repositories/log_filter_repository_test.go
deleted file mode 100644
index 4468e9d1..00000000
--- a/pkg/eth/datastore/postgres/repositories/log_filter_repository_test.go
+++ /dev/null
@@ -1,127 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package repositories_test
-
-import (
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
- "github.com/vulcanize/vulcanizedb/pkg/eth/filters"
- "github.com/vulcanize/vulcanizedb/test_config"
-)
-
-var _ = Describe("Log Filters Repository", func() {
- var db *postgres.DB
- var filterRepository datastore.FilterRepository
- var node core.Node
- BeforeEach(func() {
- node = core.Node{
- GenesisBlock: "GENESIS",
- NetworkID: 1,
- ID: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845",
- ClientName: "Geth/v1.7.2-stable-1db4ecdc/darwin-amd64/go1.9",
- }
- db = test_config.NewTestDB(node)
- test_config.CleanTestDB(db)
- filterRepository = repositories.FilterRepository{DB: db}
- })
-
- Describe("LogFilter", func() {
-
- It("inserts filter into watched events", func() {
-
- logFilter := filters.LogFilter{
- Name: "TestFilter",
- FromBlock: 1,
- ToBlock: 2,
- Address: "0x8888f1f195afa192cfee860698584c030f4c9db1",
- Topics: core.Topics{
- "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b",
- "",
- "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b",
- "",
- },
- }
- err := filterRepository.CreateFilter(logFilter)
- Expect(err).ToNot(HaveOccurred())
- })
-
- It("returns error if name is not provided", func() {
-
- logFilter := filters.LogFilter{
- FromBlock: 1,
- ToBlock: 2,
- Address: "0x8888f1f195afa192cfee860698584c030f4c9db1",
- Topics: core.Topics{
- "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b",
- "",
- "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b",
- "",
- },
- }
- err := filterRepository.CreateFilter(logFilter)
- Expect(err).To(HaveOccurred())
- })
-
- It("gets a log filter", func() {
-
- expectedLogFilter1 := filters.LogFilter{
- Name: "TestFilter1",
- FromBlock: 1,
- ToBlock: 2,
- Address: "0x8888f1f195afa192cfee860698584c030f4c9db1",
- Topics: core.Topics{
- "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b",
- "",
- "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b",
- "",
- },
- }
- err := filterRepository.CreateFilter(expectedLogFilter1)
- Expect(err).ToNot(HaveOccurred())
- expectedLogFilter2 := filters.LogFilter{
- Name: "TestFilter2",
- FromBlock: 10,
- ToBlock: 20,
- Address: "0x8888f1f195afa192cfee860698584c030f4c9db1",
- Topics: core.Topics{
- "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b",
- "",
- "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b",
- "",
- },
- }
- err = filterRepository.CreateFilter(expectedLogFilter2)
- Expect(err).ToNot(HaveOccurred())
-
- logFilter1, err := filterRepository.GetFilter("TestFilter1")
- Expect(err).ToNot(HaveOccurred())
- Expect(logFilter1).To(Equal(expectedLogFilter1))
- logFilter2, err := filterRepository.GetFilter("TestFilter2")
- Expect(err).ToNot(HaveOccurred())
- Expect(logFilter2).To(Equal(expectedLogFilter2))
- })
-
- It("returns ErrFilterDoesNotExist error when log does not exist", func() {
- _, err := filterRepository.GetFilter("TestFilter1")
- Expect(err).To(Equal(datastore.ErrFilterDoesNotExist("TestFilter1")))
- })
- })
-})
diff --git a/pkg/eth/datastore/postgres/repositories/storage_diff_repository.go b/pkg/eth/datastore/postgres/repositories/storage_diff_repository.go
index 56a9dbc3..81b1b183 100644
--- a/pkg/eth/datastore/postgres/repositories/storage_diff_repository.go
+++ b/pkg/eth/datastore/postgres/repositories/storage_diff_repository.go
@@ -20,7 +20,7 @@ import (
"database/sql"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
var ErrDuplicateDiff = sql.ErrNoRows
diff --git a/pkg/eth/datastore/postgres/repositories/storage_diff_repository_test.go b/pkg/eth/datastore/postgres/repositories/storage_diff_repository_test.go
index 96542e00..bd48e7bf 100644
--- a/pkg/eth/datastore/postgres/repositories/storage_diff_repository_test.go
+++ b/pkg/eth/datastore/postgres/repositories/storage_diff_repository_test.go
@@ -24,8 +24,8 @@ import (
. "github.com/onsi/gomega"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
"github.com/vulcanize/vulcanizedb/libraries/shared/test_data"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/test_config"
)
diff --git a/pkg/eth/datastore/postgres/repositories/watched_events_repository.go b/pkg/eth/datastore/postgres/repositories/watched_events_repository.go
deleted file mode 100644
index 93e51f25..00000000
--- a/pkg/eth/datastore/postgres/repositories/watched_events_repository.go
+++ /dev/null
@@ -1,52 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package repositories
-
-import (
- "github.com/sirupsen/logrus"
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
-)
-
-type WatchedEventRepository struct {
- *postgres.DB
-}
-
-func (watchedEventRepository WatchedEventRepository) GetWatchedEvents(name string) ([]*core.WatchedEvent, error) {
- rows, err := watchedEventRepository.DB.Queryx(`SELECT id, name, block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data FROM watched_event_logs where name=$1`, name)
- if err != nil {
- logrus.Error("GetWatchedEvents: error getting watched events: ", err)
- return nil, err
- }
- defer rows.Close()
-
- lgs := make([]*core.WatchedEvent, 0)
- for rows.Next() {
- lg := new(core.WatchedEvent)
- err = rows.StructScan(lg)
- if err != nil {
- logrus.Warn("GetWatchedEvents: error scanning log: ", err)
- return nil, err
- }
- lgs = append(lgs, lg)
- }
- if err = rows.Err(); err != nil {
- logrus.Warn("GetWatchedEvents: error scanning logs: ", err)
- return nil, err
- }
- return lgs, nil
-}
diff --git a/pkg/eth/datastore/postgres/repositories/watched_events_repository_test.go b/pkg/eth/datastore/postgres/repositories/watched_events_repository_test.go
deleted file mode 100644
index 3e83b24e..00000000
--- a/pkg/eth/datastore/postgres/repositories/watched_events_repository_test.go
+++ /dev/null
@@ -1,161 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package repositories_test
-
-import (
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
-
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
- "github.com/vulcanize/vulcanizedb/pkg/eth/filters"
- "github.com/vulcanize/vulcanizedb/test_config"
-)
-
-var _ = Describe("Watched Events Repository", func() {
- var db *postgres.DB
- var blocksRepository datastore.BlockRepository
- var filterRepository datastore.FilterRepository
- var logRepository datastore.FullSyncLogRepository
- var receiptRepository datastore.FullSyncReceiptRepository
- var watchedEventRepository datastore.WatchedEventRepository
-
- BeforeEach(func() {
- db = test_config.NewTestDB(core.Node{})
- test_config.CleanTestDB(db)
- blocksRepository = repositories.NewBlockRepository(db)
- filterRepository = repositories.FilterRepository{DB: db}
- logRepository = repositories.FullSyncLogRepository{DB: db}
- receiptRepository = repositories.FullSyncReceiptRepository{DB: db}
- watchedEventRepository = repositories.WatchedEventRepository{DB: db}
- })
-
- It("retrieves watched event logs that match the event filter", func() {
- filter := filters.LogFilter{
- Name: "Filter1",
- FromBlock: 0,
- ToBlock: 10,
- Address: "0x123",
- Topics: core.Topics{0: "event1=10", 2: "event3=hello"},
- }
- logs := []core.FullSyncLog{
- {
- BlockNumber: 0,
- TxHash: "0x1",
- Address: "0x123",
- Topics: core.Topics{0: "event1=10", 2: "event3=hello"},
- Index: 0,
- Data: "",
- },
- }
- expectedWatchedEventLog := []*core.WatchedEvent{
- {
- Name: "Filter1",
- BlockNumber: 0,
- TxHash: "0x1",
- Address: "0x123",
- Topic0: "event1=10",
- Topic2: "event3=hello",
- Index: 0,
- Data: "",
- },
- }
- err := filterRepository.CreateFilter(filter)
- Expect(err).ToNot(HaveOccurred())
- blockId, err := blocksRepository.CreateOrUpdateBlock(core.Block{})
- Expect(err).NotTo(HaveOccurred())
- tx, txBeginErr := db.Beginx()
- Expect(txBeginErr).NotTo(HaveOccurred())
- receiptId, err := receiptRepository.CreateFullSyncReceiptInTx(blockId, core.Receipt{}, tx)
- tx.Commit()
- Expect(err).NotTo(HaveOccurred())
- err = logRepository.CreateLogs(logs, receiptId)
- Expect(err).ToNot(HaveOccurred())
- matchingLogs, err := watchedEventRepository.GetWatchedEvents("Filter1")
- Expect(err).ToNot(HaveOccurred())
- Expect(len(matchingLogs)).To(Equal(1))
- Expect(matchingLogs[0].Name).To(Equal(expectedWatchedEventLog[0].Name))
- Expect(matchingLogs[0].BlockNumber).To(Equal(expectedWatchedEventLog[0].BlockNumber))
- Expect(matchingLogs[0].TxHash).To(Equal(expectedWatchedEventLog[0].TxHash))
- Expect(matchingLogs[0].Address).To(Equal(expectedWatchedEventLog[0].Address))
- Expect(matchingLogs[0].Topic0).To(Equal(expectedWatchedEventLog[0].Topic0))
- Expect(matchingLogs[0].Topic1).To(Equal(expectedWatchedEventLog[0].Topic1))
- Expect(matchingLogs[0].Topic2).To(Equal(expectedWatchedEventLog[0].Topic2))
- Expect(matchingLogs[0].Data).To(Equal(expectedWatchedEventLog[0].Data))
- })
-
- It("retrieves a watched event log by name", func() {
- filter := filters.LogFilter{
- Name: "Filter1",
- FromBlock: 0,
- ToBlock: 10,
- Address: "0x123",
- Topics: core.Topics{0: "event1=10", 2: "event3=hello"},
- }
- logs := []core.FullSyncLog{
- {
- BlockNumber: 0,
- TxHash: "0x1",
- Address: "0x123",
- Topics: core.Topics{0: "event1=10", 2: "event3=hello"},
- Index: 0,
- Data: "",
- },
- {
- BlockNumber: 100,
- TxHash: "",
- Address: "",
- Topics: core.Topics{},
- Index: 0,
- Data: "",
- },
- }
- expectedWatchedEventLog := []*core.WatchedEvent{{
- Name: "Filter1",
- BlockNumber: 0,
- TxHash: "0x1",
- Address: "0x123",
- Topic0: "event1=10",
- Topic2: "event3=hello",
- Index: 0,
- Data: "",
- }}
- err := filterRepository.CreateFilter(filter)
- Expect(err).ToNot(HaveOccurred())
- blockId, err := blocksRepository.CreateOrUpdateBlock(core.Block{Hash: "Ox123"})
- Expect(err).NotTo(HaveOccurred())
- tx, _ := db.Beginx()
- receiptId, err := receiptRepository.CreateFullSyncReceiptInTx(blockId, core.Receipt{}, tx)
- tx.Commit()
- Expect(err).NotTo(HaveOccurred())
- err = logRepository.CreateLogs(logs, receiptId)
- Expect(err).ToNot(HaveOccurred())
- matchingLogs, err := watchedEventRepository.GetWatchedEvents("Filter1")
- Expect(err).ToNot(HaveOccurred())
- Expect(len(matchingLogs)).To(Equal(1))
- Expect(matchingLogs[0].Name).To(Equal(expectedWatchedEventLog[0].Name))
- Expect(matchingLogs[0].BlockNumber).To(Equal(expectedWatchedEventLog[0].BlockNumber))
- Expect(matchingLogs[0].TxHash).To(Equal(expectedWatchedEventLog[0].TxHash))
- Expect(matchingLogs[0].Address).To(Equal(expectedWatchedEventLog[0].Address))
- Expect(matchingLogs[0].Topic0).To(Equal(expectedWatchedEventLog[0].Topic0))
- Expect(matchingLogs[0].Topic1).To(Equal(expectedWatchedEventLog[0].Topic1))
- Expect(matchingLogs[0].Topic2).To(Equal(expectedWatchedEventLog[0].Topic2))
- Expect(matchingLogs[0].Data).To(Equal(expectedWatchedEventLog[0].Data))
- })
-})
diff --git a/pkg/eth/datastore/repository.go b/pkg/eth/datastore/repository.go
index 6eb90757..fb629cce 100644
--- a/pkg/eth/datastore/repository.go
+++ b/pkg/eth/datastore/repository.go
@@ -29,7 +29,6 @@ type AddressRepository interface {
}
type BlockRepository interface {
- CreateOrUpdateBlock(block core.Block) (int64, error)
GetBlock(blockNumber int64) (core.Block, error)
MissingBlockNumbers(startingBlockNumber, endingBlockNumber int64, nodeID string) []int64
SetBlocksStatus(chainHead int64) error
diff --git a/pkg/eth/fakes/mock_blockchain.go b/pkg/eth/fakes/mock_blockchain.go
index 2847c66c..9b2c84a5 100644
--- a/pkg/eth/fakes/mock_blockchain.go
+++ b/pkg/eth/fakes/mock_blockchain.go
@@ -52,7 +52,7 @@ type MockBlockChain struct {
func NewMockBlockChain() *MockBlockChain {
return &MockBlockChain{
- node: core.Node{GenesisBlock: "GENESIS", NetworkID: 1, ID: "x123", ClientName: "Geth"},
+ node: core.Node{GenesisBlock: "GENESIS", NetworkID: "1", ID: "x123", ClientName: "Geth"},
}
}
diff --git a/pkg/eth/history/block_validator.go b/pkg/eth/history/block_validator.go
deleted file mode 100644
index 6cc3958b..00000000
--- a/pkg/eth/history/block_validator.go
+++ /dev/null
@@ -1,65 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package history
-
-import (
- "github.com/sirupsen/logrus"
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
-)
-
-type BlockValidator struct {
- blockchain core.BlockChain
- blockRepository datastore.BlockRepository
- windowSize int
-}
-
-func NewBlockValidator(blockchain core.BlockChain, blockRepository datastore.BlockRepository, windowSize int) *BlockValidator {
- return &BlockValidator{
- blockchain: blockchain,
- blockRepository: blockRepository,
- windowSize: windowSize,
- }
-}
-
-func (bv BlockValidator) ValidateBlocks() (ValidationWindow, error) {
- window, err := MakeValidationWindow(bv.blockchain, bv.windowSize)
- if err != nil {
- logrus.Error("ValidateBlocks: error creating validation window: ", err)
- return ValidationWindow{}, err
- }
-
- blockNumbers := MakeRange(window.LowerBound, window.UpperBound)
- _, err = RetrieveAndUpdateBlocks(bv.blockchain, bv.blockRepository, blockNumbers)
- if err != nil {
- logrus.Error("ValidateBlocks: error getting and updating blocks: ", err)
- return ValidationWindow{}, err
- }
-
- lastBlock, err := bv.blockchain.LastBlock()
- if err != nil {
- logrus.Error("ValidateBlocks: error getting last block: ", err)
- return ValidationWindow{}, err
- }
-
- err = bv.blockRepository.SetBlocksStatus(lastBlock.Int64())
- if err != nil {
- logrus.Error("ValidateBlocks: error setting block status: ", err)
- return ValidationWindow{}, err
- }
- return window, nil
-}
diff --git a/pkg/eth/history/block_validator_test.go b/pkg/eth/history/block_validator_test.go
deleted file mode 100644
index 8771455c..00000000
--- a/pkg/eth/history/block_validator_test.go
+++ /dev/null
@@ -1,51 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package history_test
-
-import (
- "math/big"
-
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
-
- "github.com/vulcanize/vulcanizedb/pkg/eth/fakes"
- "github.com/vulcanize/vulcanizedb/pkg/eth/history"
-)
-
-var _ = Describe("Blocks validator", func() {
-
- It("calls create or update for all blocks within the window", func() {
- blockChain := fakes.NewMockBlockChain()
- blockChain.SetLastBlock(big.NewInt(7))
- blocksRepository := fakes.NewMockBlockRepository()
- validator := history.NewBlockValidator(blockChain, blocksRepository, 2)
-
- window, err := validator.ValidateBlocks()
- Expect(err).NotTo(HaveOccurred())
-
- Expect(window).To(Equal(history.ValidationWindow{LowerBound: 5, UpperBound: 7}))
- blocksRepository.AssertCreateOrUpdateBlockCallCountEquals(3)
- })
-
- It("returns the number of largest block", func() {
- blockChain := fakes.NewMockBlockChain()
- blockChain.SetLastBlock(big.NewInt(3))
- maxBlockNumber, _ := blockChain.LastBlock()
-
- Expect(maxBlockNumber.Int64()).To(Equal(int64(3)))
- })
-})
diff --git a/pkg/eth/history/populate_blocks.go b/pkg/eth/history/populate_blocks.go
deleted file mode 100644
index b3bf064b..00000000
--- a/pkg/eth/history/populate_blocks.go
+++ /dev/null
@@ -1,69 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package history
-
-import (
- "fmt"
-
- log "github.com/sirupsen/logrus"
-
- "github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore"
-)
-
-func PopulateMissingBlocks(blockchain core.BlockChain, blockRepository datastore.BlockRepository, startingBlockNumber int64) (int, error) {
- lastBlock, err := blockchain.LastBlock()
- if err != nil {
- log.Error("PopulateMissingBlocks: error getting last block: ", err)
- return 0, err
- }
- blockRange := blockRepository.MissingBlockNumbers(startingBlockNumber, lastBlock.Int64(), blockchain.Node().ID)
-
- if len(blockRange) == 0 {
- return 0, nil
- }
-
- log.Debug(getBlockRangeString(blockRange))
- _, err = RetrieveAndUpdateBlocks(blockchain, blockRepository, blockRange)
- if err != nil {
- log.Error("PopulateMissingBlocks: error gettings/updating blocks: ", err)
- return 0, err
- }
- return len(blockRange), nil
-}
-
-func RetrieveAndUpdateBlocks(blockchain core.BlockChain, blockRepository datastore.BlockRepository, blockNumbers []int64) (int, error) {
- for _, blockNumber := range blockNumbers {
- block, err := blockchain.GetBlockByNumber(blockNumber)
- if err != nil {
- log.Error("RetrieveAndUpdateBlocks: error getting block: ", err)
- return 0, err
- }
-
- _, err = blockRepository.CreateOrUpdateBlock(block)
- if err != nil {
- log.Error("RetrieveAndUpdateBlocks: error creating/updating block: ", err)
- return 0, err
- }
-
- }
- return len(blockNumbers), nil
-}
-
-func getBlockRangeString(blockRange []int64) string {
- return fmt.Sprintf("Backfilling |%v| blocks", len(blockRange))
-}
diff --git a/pkg/eth/history/populate_blocks_test.go b/pkg/eth/history/populate_blocks_test.go
deleted file mode 100644
index aa0fe7de..00000000
--- a/pkg/eth/history/populate_blocks_test.go
+++ /dev/null
@@ -1,90 +0,0 @@
-// VulcanizeDB
-// Copyright © 2019 Vulcanize
-
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
-
-package history_test
-
-import (
- "math/big"
-
- . "github.com/onsi/ginkgo"
- . "github.com/onsi/gomega"
- "github.com/vulcanize/vulcanizedb/pkg/eth/fakes"
- "github.com/vulcanize/vulcanizedb/pkg/eth/history"
-)
-
-var _ = Describe("Populating blocks", func() {
- var blockRepository *fakes.MockBlockRepository
-
- BeforeEach(func() {
- blockRepository = fakes.NewMockBlockRepository()
- })
-
- It("fills in the only missing block (BlockNumber 1)", func() {
- blockChain := fakes.NewMockBlockChain()
- blockChain.SetLastBlock(big.NewInt(2))
- blockRepository.SetMissingBlockNumbersReturnArray([]int64{2})
-
- blocksAdded, err := history.PopulateMissingBlocks(blockChain, blockRepository, 1)
- Expect(err).NotTo(HaveOccurred())
- _, err = blockRepository.GetBlock(1)
-
- Expect(blocksAdded).To(Equal(1))
- Expect(err).ToNot(HaveOccurred())
- })
-
- It("fills in the three missing blocks (Numbers: 5,8,10)", func() {
- blockChain := fakes.NewMockBlockChain()
- blockChain.SetLastBlock(big.NewInt(13))
- blockRepository.SetMissingBlockNumbersReturnArray([]int64{5, 8, 10})
-
- blocksAdded, err := history.PopulateMissingBlocks(blockChain, blockRepository, 5)
-
- Expect(err).NotTo(HaveOccurred())
- Expect(blocksAdded).To(Equal(3))
- blockRepository.AssertCreateOrUpdateBlocksCallCountAndBlockNumbersEquals(3, []int64{5, 8, 10})
- })
-
- It("returns the number of blocks created", func() {
- blockChain := fakes.NewMockBlockChain()
- blockChain.SetLastBlock(big.NewInt(6))
- blockRepository.SetMissingBlockNumbersReturnArray([]int64{4, 5})
-
- numberOfBlocksCreated, err := history.PopulateMissingBlocks(blockChain, blockRepository, 3)
-
- Expect(err).NotTo(HaveOccurred())
- Expect(numberOfBlocksCreated).To(Equal(2))
- })
-
- It("updates the repository with a range of blocks w/in the range ", func() {
- blockChain := fakes.NewMockBlockChain()
-
- _, err := history.RetrieveAndUpdateBlocks(blockChain, blockRepository, history.MakeRange(2, 5))
-
- Expect(err).NotTo(HaveOccurred())
- blockRepository.AssertCreateOrUpdateBlocksCallCountAndBlockNumbersEquals(4, []int64{2, 3, 4, 5})
- })
-
- It("does not call repository create block when there is an error", func() {
- blockChain := fakes.NewMockBlockChain()
- blockChain.SetGetBlockByNumberErr(fakes.FakeError)
- blocks := history.MakeRange(1, 10)
-
- _, err := history.RetrieveAndUpdateBlocks(blockChain, blockRepository, blocks)
-
- Expect(err).To(HaveOccurred())
- blockRepository.AssertCreateOrUpdateBlockCallCountEquals(0)
- })
-})
diff --git a/pkg/eth/history/populate_headers.go b/pkg/eth/history/populate_headers.go
index d5138e29..1114cc20 100644
--- a/pkg/eth/history/populate_headers.go
+++ b/pkg/eth/history/populate_headers.go
@@ -17,6 +17,7 @@
package history
import (
+ "fmt"
"github.com/sirupsen/logrus"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
@@ -61,3 +62,7 @@ func RetrieveAndUpdateHeaders(blockChain core.BlockChain, headerRepository datas
}
return len(blockNumbers), nil
}
+
+func getBlockRangeString(blockRange []int64) string {
+ return fmt.Sprintf("Backfilling |%v| blocks", len(blockRange))
+}
diff --git a/pkg/eth/node/node.go b/pkg/eth/node/node.go
index d5ce5c97..3ca2b0e1 100644
--- a/pkg/eth/node/node.go
+++ b/pkg/eth/node/node.go
@@ -18,6 +18,7 @@ package node
import (
"context"
+ "fmt"
"regexp"
"strconv"
"strings"
@@ -60,7 +61,7 @@ func MakeNode(rpcClient core.RPCClient) core.Node {
id, name := pr.NodeInfo()
return core.Node{
GenesisBlock: pr.GenesisBlock(),
- NetworkID: pr.NetworkID(),
+ NetworkID: fmt.Sprintf("%f", pr.NetworkID()),
ID: id,
ClientName: name,
}
diff --git a/pkg/eth/node/node_test.go b/pkg/eth/node/node_test.go
index 5afda719..3bee1fad 100644
--- a/pkg/eth/node/node_test.go
+++ b/pkg/eth/node/node_test.go
@@ -82,7 +82,7 @@ var _ = Describe("Node Info", func() {
It("returns the network id for any client", func() {
client := fakes.NewMockRPCClient()
n := node.MakeNode(client)
- Expect(n.NetworkID).To(Equal(float64(1234)))
+ Expect(n.NetworkID).To(Equal("1234.000000"))
})
It("returns geth ID and client name for geth node", func() {
diff --git a/pkg/eth/datastore/postgres/errors.go b/pkg/postgres/errors.go
similarity index 100%
rename from pkg/eth/datastore/postgres/errors.go
rename to pkg/postgres/errors.go
diff --git a/pkg/eth/datastore/postgres/postgres.go b/pkg/postgres/postgres.go
similarity index 90%
rename from pkg/eth/datastore/postgres/postgres.go
rename to pkg/postgres/postgres.go
index 26e5bf9c..8062decf 100644
--- a/pkg/eth/datastore/postgres/postgres.go
+++ b/pkg/postgres/postgres.go
@@ -46,13 +46,13 @@ func NewDB(databaseConfig config.Database, node core.Node) (*DB, error) {
func (db *DB) CreateNode(node *core.Node) error {
var nodeID int64
err := db.QueryRow(
- `INSERT INTO eth_nodes (genesis_block, network_id, eth_node_id, client_name)
+ `INSERT INTO nodes (genesis_block, network_id, node_id, client_name)
VALUES ($1, $2, $3, $4)
- ON CONFLICT (genesis_block, network_id, eth_node_id)
+ ON CONFLICT (genesis_block, network_id, node_id)
DO UPDATE
SET genesis_block = $1,
network_id = $2,
- eth_node_id = $3,
+ node_id = $3,
client_name = $4
RETURNING id`,
node.GenesisBlock, node.NetworkID, node.ID, node.ClientName).Scan(&nodeID)
diff --git a/pkg/eth/datastore/postgres/postgres_suite_test.go b/pkg/postgres/postgres_suite_test.go
similarity index 100%
rename from pkg/eth/datastore/postgres/postgres_suite_test.go
rename to pkg/postgres/postgres_suite_test.go
diff --git a/pkg/eth/datastore/postgres/postgres_test.go b/pkg/postgres/postgres_test.go
similarity index 55%
rename from pkg/eth/datastore/postgres/postgres_test.go
rename to pkg/postgres/postgres_test.go
index 37621f7a..b927c1ed 100644
--- a/pkg/eth/datastore/postgres/postgres_test.go
+++ b/pkg/postgres/postgres_test.go
@@ -28,8 +28,7 @@ import (
. "github.com/onsi/gomega"
"github.com/vulcanize/vulcanizedb/pkg/config"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/test_config"
)
@@ -82,30 +81,9 @@ var _ = Describe("Postgres DB", func() {
Expect(actual).To(Equal(bi))
})
- It("does not commit block if block is invalid", func() {
- //badNonce violates db Nonce field length
- badNonce := fmt.Sprintf("x %s", strings.Repeat("1", 100))
- badBlock := core.Block{
- Number: 123,
- Nonce: badNonce,
- Transactions: []core.TransactionModel{},
- }
- node := core.Node{GenesisBlock: "GENESIS", NetworkID: 1, ID: "x123", ClientName: "geth"}
- db := test_config.NewTestDB(node)
- test_config.CleanTestDB(db)
- blocksRepository := repositories.NewBlockRepository(db)
-
- _, err1 := blocksRepository.CreateOrUpdateBlock(badBlock)
-
- Expect(err1).To(HaveOccurred())
- savedBlock, err2 := blocksRepository.GetBlock(123)
- Expect(err2).To(HaveOccurred())
- Expect(savedBlock).To(BeZero())
- })
-
It("throws error when can't connect to the database", func() {
invalidDatabase := config.Database{}
- node := core.Node{GenesisBlock: "GENESIS", NetworkID: 1, ID: "x123", ClientName: "geth"}
+ node := core.Node{GenesisBlock: "GENESIS", NetworkID: "1", ID: "x123", ClientName: "geth"}
_, err := postgres.NewDB(invalidDatabase, node)
@@ -115,51 +93,11 @@ var _ = Describe("Postgres DB", func() {
It("throws error when can't create node", func() {
badHash := fmt.Sprintf("x %s", strings.Repeat("1", 100))
- node := core.Node{GenesisBlock: badHash, NetworkID: 1, ID: "x123", ClientName: "geth"}
+ node := core.Node{GenesisBlock: badHash, NetworkID: "1", ID: "x123", ClientName: "geth"}
_, err := postgres.NewDB(test_config.DBConfig, node)
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring(postgres.SettingNodeFailedMsg))
})
-
- It("does not commit log if log is invalid", func() {
- //badTxHash violates db tx_hash field length
- badTxHash := fmt.Sprintf("x %s", strings.Repeat("1", 100))
- badLog := core.FullSyncLog{
- Address: "x123",
- BlockNumber: 1,
- TxHash: badTxHash,
- }
- node := core.Node{GenesisBlock: "GENESIS", NetworkID: 1, ID: "x123", ClientName: "geth"}
- db, _ := postgres.NewDB(test_config.DBConfig, node)
- logRepository := repositories.FullSyncLogRepository{DB: db}
-
- err := logRepository.CreateLogs([]core.FullSyncLog{badLog}, 123)
-
- Expect(err).ToNot(BeNil())
- savedBlock, err := logRepository.GetLogs("x123", 1)
- Expect(savedBlock).To(BeNil())
- Expect(err).To(Not(HaveOccurred()))
- })
-
- It("does not commit block or transactions if transaction is invalid", func() {
- //badHash violates db To field length
- badHash := fmt.Sprintf("x %s", strings.Repeat("1", 100))
- badTransaction := core.TransactionModel{To: badHash}
- block := core.Block{
- Number: 123,
- Transactions: []core.TransactionModel{badTransaction},
- }
- node := core.Node{GenesisBlock: "GENESIS", NetworkID: 1, ID: "x123", ClientName: "geth"}
- db, _ := postgres.NewDB(test_config.DBConfig, node)
- blockRepository := repositories.NewBlockRepository(db)
-
- _, err1 := blockRepository.CreateOrUpdateBlock(block)
-
- Expect(err1).To(HaveOccurred())
- savedBlock, err2 := blockRepository.GetBlock(123)
- Expect(err2).To(HaveOccurred())
- Expect(savedBlock).To(BeZero())
- })
})
diff --git a/pkg/super_node/btc/indexer.go b/pkg/super_node/btc/indexer.go
index 4669229c..d0a88f1a 100644
--- a/pkg/super_node/btc/indexer.go
+++ b/pkg/super_node/btc/indexer.go
@@ -24,7 +24,7 @@ import (
"github.com/jmoiron/sqlx"
"github.com/lib/pq"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/pkg/super_node/shared"
)
@@ -47,7 +47,7 @@ func (in *CIDIndexer) Index(cids shared.CIDsForIndexing) error {
if err != nil {
return err
}
- headerID, err := in.indexHeaderCID(tx, cidWrapper.HeaderCID)
+ headerID, err := in.indexHeaderCID(tx, cidWrapper.HeaderCID, in.db.NodeID)
if err != nil {
logrus.Error("btc indexer error when indexing header")
return err
@@ -59,13 +59,13 @@ func (in *CIDIndexer) Index(cids shared.CIDsForIndexing) error {
return tx.Commit()
}
-func (in *CIDIndexer) indexHeaderCID(tx *sqlx.Tx, header HeaderModel) (int64, error) {
+func (in *CIDIndexer) indexHeaderCID(tx *sqlx.Tx, header HeaderModel, nodeID int64) (int64, error) {
var headerID int64
- err := tx.QueryRowx(`INSERT INTO btc.header_cids (block_number, block_hash, parent_hash, cid, timestamp, bits)
- VALUES ($1, $2, $3, $4, $5, $6)
- ON CONFLICT (block_number, block_hash) DO UPDATE SET (parent_hash, cid, timestamp, bits) = ($3, $4, $5, $6)
+ err := tx.QueryRowx(`INSERT INTO btc.header_cids (block_number, block_hash, parent_hash, cid, timestamp, bits, node_id)
+ VALUES ($1, $2, $3, $4, $5, $6, $7)
+ ON CONFLICT (block_number, block_hash) DO UPDATE SET (parent_hash, cid, timestamp, bits, node_id) = ($3, $4, $5, $6, $7)
RETURNING id`,
- header.BlockNumber, header.BlockHash, header.ParentHash, header.CID, header.Timestamp, header.Bits).Scan(&headerID)
+ header.BlockNumber, header.BlockHash, header.ParentHash, header.CID, header.Timestamp, header.Bits, nodeID).Scan(&headerID)
return headerID, err
}
diff --git a/pkg/super_node/btc/indexer_test.go b/pkg/super_node/btc/indexer_test.go
index d5a093b1..a0cb2f99 100644
--- a/pkg/super_node/btc/indexer_test.go
+++ b/pkg/super_node/btc/indexer_test.go
@@ -20,7 +20,7 @@ import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/pkg/super_node/btc"
"github.com/vulcanize/vulcanizedb/pkg/super_node/btc/mocks"
"github.com/vulcanize/vulcanizedb/pkg/super_node/shared"
diff --git a/pkg/super_node/btc/models.go b/pkg/super_node/btc/models.go
index 05c06e01..f41ced49 100644
--- a/pkg/super_node/btc/models.go
+++ b/pkg/super_node/btc/models.go
@@ -27,6 +27,7 @@ type HeaderModel struct {
CID string `db:"cid"`
Timestamp int64 `db:"timestamp"`
Bits uint32 `db:"bits"`
+ NodeID int64 `db:"node_id"`
}
// TxModel is the db model for btc.transaction_cids table
diff --git a/pkg/super_node/btc/payload_fetcher.go b/pkg/super_node/btc/payload_fetcher.go
index e778e843..6bc0c476 100644
--- a/pkg/super_node/btc/payload_fetcher.go
+++ b/pkg/super_node/btc/payload_fetcher.go
@@ -32,10 +32,14 @@ type PayloadFetcher struct {
}
-// NewStateDiffFetcher returns a PayloadFetcher
+// NewPayloadFetcher returns a PayloadFetcher, or an error if the underlying rpc client cannot be created
-func NewPayloadFetcher(c *rpcclient.Client) *PayloadFetcher {
- return &PayloadFetcher{
- client: c,
+func NewPayloadFetcher(c *rpcclient.ConnConfig) (*PayloadFetcher, error) {
+ client, err := rpcclient.New(c, nil)
+ if err != nil {
+ return nil, err
}
+ return &PayloadFetcher{
+ client: client,
+ }, nil
}
// FetchAt fetches the block payloads at the given block heights
diff --git a/pkg/super_node/btc/retriever.go b/pkg/super_node/btc/retriever.go
index 3e019a75..d49bd3c4 100644
--- a/pkg/super_node/btc/retriever.go
+++ b/pkg/super_node/btc/retriever.go
@@ -26,7 +26,7 @@ import (
"github.com/jmoiron/sqlx"
log "github.com/sirupsen/logrus"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/pkg/super_node/shared"
)
diff --git a/pkg/super_node/btc/test_helpers.go b/pkg/super_node/btc/test_helpers.go
index 7e431af6..01fa5af0 100644
--- a/pkg/super_node/btc/test_helpers.go
+++ b/pkg/super_node/btc/test_helpers.go
@@ -19,7 +19,7 @@ package btc
import (
. "github.com/onsi/gomega"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
// TearDownDB is used to tear down the super node dbs after tests
diff --git a/pkg/super_node/constructors.go b/pkg/super_node/constructors.go
index b288e853..84782433 100644
--- a/pkg/super_node/constructors.go
+++ b/pkg/super_node/constructors.go
@@ -26,7 +26,7 @@ import (
"github.com/ethereum/go-ethereum/rpc"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/pkg/super_node/btc"
"github.com/vulcanize/vulcanizedb/pkg/super_node/eth"
"github.com/vulcanize/vulcanizedb/pkg/super_node/shared"
@@ -102,11 +102,11 @@ func NewPaylaodFetcher(chain shared.ChainType, client interface{}) (shared.Paylo
}
return eth.NewPayloadFetcher(batchClient), nil
case shared.Bitcoin:
- rpcClient, ok := client.(*rpcclient.Client)
+ connConfig, ok := client.(*rpcclient.ConnConfig)
if !ok {
-return nil, fmt.Errorf("bitcoin payload fetcher constructor expected client type %T got %T", &rpcclient.Client{}, client)
+return nil, fmt.Errorf("bitcoin payload fetcher constructor expected client type %T got %T", &rpcclient.ConnConfig{}, client)
}
- return btc.NewPayloadFetcher(rpcClient), nil
+ return btc.NewPayloadFetcher(connConfig)
default:
return nil, fmt.Errorf("invalid chain %s for payload fetcher constructor", chain.String())
}
diff --git a/pkg/super_node/eth/backend.go b/pkg/super_node/eth/backend.go
index 8edc7e2d..635d5f1e 100644
--- a/pkg/super_node/eth/backend.go
+++ b/pkg/super_node/eth/backend.go
@@ -28,7 +28,7 @@ import (
"github.com/ethereum/go-ethereum/rpc"
"github.com/sirupsen/logrus"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
var (
diff --git a/pkg/super_node/eth/indexer.go b/pkg/super_node/eth/indexer.go
index da0c664a..cc70720c 100644
--- a/pkg/super_node/eth/indexer.go
+++ b/pkg/super_node/eth/indexer.go
@@ -25,7 +25,7 @@ import (
"github.com/jmoiron/sqlx"
log "github.com/sirupsen/logrus"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
// Indexer satisfies the Indexer interface for ethereum
@@ -50,7 +50,7 @@ func (in *CIDIndexer) Index(cids shared.CIDsForIndexing) error {
if err != nil {
return err
}
- headerID, err := in.indexHeaderCID(tx, cidPayload.HeaderCID)
+ headerID, err := in.indexHeaderCID(tx, cidPayload.HeaderCID, in.db.NodeID)
if err != nil {
if err := tx.Rollback(); err != nil {
log.Error(err)
@@ -80,12 +80,12 @@ func (in *CIDIndexer) Index(cids shared.CIDsForIndexing) error {
return tx.Commit()
}
-func (in *CIDIndexer) indexHeaderCID(tx *sqlx.Tx, header HeaderModel) (int64, error) {
+func (in *CIDIndexer) indexHeaderCID(tx *sqlx.Tx, header HeaderModel, nodeID int64) (int64, error) {
var headerID int64
- err := tx.QueryRowx(`INSERT INTO eth.header_cids (block_number, block_hash, parent_hash, cid, td) VALUES ($1, $2, $3, $4, $5)
- ON CONFLICT (block_number, block_hash) DO UPDATE SET (parent_hash, cid, td) = ($3, $4, $5)
+ err := tx.QueryRowx(`INSERT INTO eth.header_cids (block_number, block_hash, parent_hash, cid, td, node_id) VALUES ($1, $2, $3, $4, $5, $6)
+ ON CONFLICT (block_number, block_hash) DO UPDATE SET (parent_hash, cid, td, node_id) = ($3, $4, $5, $6)
RETURNING id`,
- header.BlockNumber, header.BlockHash, header.ParentHash, header.CID, header.TotalDifficulty).Scan(&headerID)
+ header.BlockNumber, header.BlockHash, header.ParentHash, header.CID, header.TotalDifficulty, nodeID).Scan(&headerID)
return headerID, err
}
diff --git a/pkg/super_node/eth/indexer_test.go b/pkg/super_node/eth/indexer_test.go
index e176f81d..c2c66a4b 100644
--- a/pkg/super_node/eth/indexer_test.go
+++ b/pkg/super_node/eth/indexer_test.go
@@ -20,7 +20,7 @@ import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/pkg/super_node/eth"
"github.com/vulcanize/vulcanizedb/pkg/super_node/eth/mocks"
"github.com/vulcanize/vulcanizedb/pkg/super_node/shared"
diff --git a/pkg/super_node/eth/models.go b/pkg/super_node/eth/models.go
index 2a9e7309..91ee4d3c 100644
--- a/pkg/super_node/eth/models.go
+++ b/pkg/super_node/eth/models.go
@@ -26,6 +26,7 @@ type HeaderModel struct {
ParentHash string `db:"parent_hash"`
CID string `db:"cid"`
TotalDifficulty string `db:"td"`
+ NodeID int64 `db:"node_id"`
}
// UncleModel is the db model for eth.uncle_cids
diff --git a/pkg/super_node/eth/retriever.go b/pkg/super_node/eth/retriever.go
index ca7c2e81..d58baf0d 100644
--- a/pkg/super_node/eth/retriever.go
+++ b/pkg/super_node/eth/retriever.go
@@ -26,7 +26,7 @@ import (
"github.com/lib/pq"
log "github.com/sirupsen/logrus"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/pkg/super_node/shared"
)
diff --git a/pkg/super_node/eth/retriever_test.go b/pkg/super_node/eth/retriever_test.go
index f5d3adc1..b010208f 100644
--- a/pkg/super_node/eth/retriever_test.go
+++ b/pkg/super_node/eth/retriever_test.go
@@ -22,7 +22,7 @@ import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/pkg/super_node/eth"
eth2 "github.com/vulcanize/vulcanizedb/pkg/super_node/eth"
"github.com/vulcanize/vulcanizedb/pkg/super_node/eth/mocks"
@@ -236,6 +236,7 @@ var _ = Describe("Retriever", func() {
Expect(len(cidWrapper.Headers)).To(Equal(1))
expectedHeaderCIDs := mocks.MockCIDWrapper.Headers
expectedHeaderCIDs[0].ID = cidWrapper.Headers[0].ID
+ expectedHeaderCIDs[0].NodeID = cidWrapper.Headers[0].NodeID
Expect(cidWrapper.Headers).To(Equal(expectedHeaderCIDs))
Expect(len(cidWrapper.Transactions)).To(Equal(2))
Expect(eth.TxModelsContainsCID(cidWrapper.Transactions, mocks.MockCIDWrapper.Transactions[0].CID)).To(BeTrue())
diff --git a/pkg/super_node/eth/test_helpers.go b/pkg/super_node/eth/test_helpers.go
index d224e028..333600c9 100644
--- a/pkg/super_node/eth/test_helpers.go
+++ b/pkg/super_node/eth/test_helpers.go
@@ -19,7 +19,7 @@ package eth
import (
. "github.com/onsi/gomega"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
// TearDownDB is used to tear down the super node dbs after tests
diff --git a/pkg/super_node/service.go b/pkg/super_node/service.go
index 31756092..e16a51be 100644
--- a/pkg/super_node/service.go
+++ b/pkg/super_node/service.go
@@ -29,8 +29,8 @@ import (
log "github.com/sirupsen/logrus"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/ipfs"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/pkg/super_node/shared"
)
diff --git a/pkg/super_node/shared/config.go b/pkg/super_node/shared/config.go
index 5cc9d1e6..f89ee0a3 100644
--- a/pkg/super_node/shared/config.go
+++ b/pkg/super_node/shared/config.go
@@ -17,22 +17,21 @@
package shared
import (
- "fmt"
- "os"
- "path/filepath"
- "time"
-
+ "github.com/btcsuite/btcd/rpcclient"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethereum/go-ethereum/rpc"
"github.com/spf13/viper"
+ "os"
+ "path/filepath"
+ "time"
"github.com/vulcanize/vulcanizedb/pkg/config"
"github.com/vulcanize/vulcanizedb/pkg/eth"
"github.com/vulcanize/vulcanizedb/pkg/eth/client"
vRpc "github.com/vulcanize/vulcanizedb/pkg/eth/converters/rpc"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/eth/node"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/utils"
)
@@ -93,7 +92,21 @@ func NewSuperNodeConfig() (*SuperNodeConfig, error) {
workers = 1
}
sn.Workers = workers
- sn.NodeInfo, sn.WSClient, err = getNodeAndClient(sn.Chain, viper.GetString("superNode.sync.wsPath"))
+ if sn.Chain == Ethereum {
+ sn.NodeInfo, sn.WSClient, err = getEthNodeAndClient(sn.Chain, viper.GetString("superNode.sync.wsPath"))
+ }
+ if sn.Chain == Bitcoin {
+ sn.NodeInfo = core.Node{
+ ID: "temporaryID",
+ ClientName: "omnicored",
+ GenesisBlock: "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f",
+ NetworkID: "0xD9B4BEF9",
+ }
+ sn.WSClient = &rpcclient.ConnConfig{
+ Host: viper.GetString("superNode.sync.wsPath"),
+ Endpoint: "ws",
+ }
+ }
}
if sn.Serve {
wsPath := viper.GetString("superNode.server.wsPath")
@@ -130,9 +143,18 @@ func NewSuperNodeConfig() (*SuperNodeConfig, error) {
// BackFillFields is used to fill in the BackFill fields of the config
func (sn *SuperNodeConfig) BackFillFields() error {
sn.BackFill = true
- _, httpClient, err := getNodeAndClient(sn.Chain, viper.GetString("superNode.backFill.httpPath"))
- if err != nil {
- return err
+ var httpClient interface{}
+ var err error
+ if sn.Chain == Ethereum {
+ _, httpClient, err = getEthNodeAndClient(sn.Chain, viper.GetString("superNode.backFill.httpPath"))
+ if err != nil {
+ return err
+ }
+ }
+ if sn.Chain == Bitcoin {
+ httpClient = &rpcclient.ConnConfig{
+ Host: viper.GetString("superNode.backFill.httpPath"),
+ }
}
sn.HTTPClient = httpClient
freq := viper.GetInt("superNode.backFill.frequency")
@@ -147,21 +169,16 @@ func (sn *SuperNodeConfig) BackFillFields() error {
return nil
}
-func getNodeAndClient(chain ChainType, path string) (core.Node, interface{}, error) {
- switch chain {
- case Ethereum:
- rawRPCClient, err := rpc.Dial(path)
- if err != nil {
- return core.Node{}, nil, err
- }
- rpcClient := client.NewRPCClient(rawRPCClient, path)
- ethClient := ethclient.NewClient(rawRPCClient)
- vdbEthClient := client.NewEthClient(ethClient)
- vdbNode := node.MakeNode(rpcClient)
- transactionConverter := vRpc.NewRPCTransactionConverter(ethClient)
- blockChain := eth.NewBlockChain(vdbEthClient, rpcClient, vdbNode, transactionConverter)
- return blockChain.Node(), rpcClient, nil
- default:
- return core.Node{}, nil, fmt.Errorf("unrecognized chain type %s", chain.String())
+func getEthNodeAndClient(chain ChainType, path string) (core.Node, interface{}, error) {
+ rawRPCClient, err := rpc.Dial(path)
+ if err != nil {
+ return core.Node{}, nil, err
}
+ rpcClient := client.NewRPCClient(rawRPCClient, path)
+ ethClient := ethclient.NewClient(rawRPCClient)
+ vdbEthClient := client.NewEthClient(ethClient)
+ vdbNode := node.MakeNode(rpcClient)
+ transactionConverter := vRpc.NewRPCTransactionConverter(ethClient)
+ blockChain := eth.NewBlockChain(vdbEthClient, rpcClient, vdbNode, transactionConverter)
+ return blockChain.Node(), rpcClient, nil
}
diff --git a/pkg/super_node/shared/mocks/retriever.go b/pkg/super_node/shared/mocks/retriever.go
index 266fe108..c98a1c32 100644
--- a/pkg/super_node/shared/mocks/retriever.go
+++ b/pkg/super_node/shared/mocks/retriever.go
@@ -17,7 +17,7 @@
package mocks
import (
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"github.com/vulcanize/vulcanizedb/pkg/super_node/shared"
)
diff --git a/pkg/super_node/shared/test_helpers.go b/pkg/super_node/shared/test_helpers.go
index 1b2ee606..81cc6e3b 100644
--- a/pkg/super_node/shared/test_helpers.go
+++ b/pkg/super_node/shared/test_helpers.go
@@ -19,7 +19,7 @@ package shared
import (
"github.com/vulcanize/vulcanizedb/pkg/config"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
// SetupDB is use to setup a db for super node tests
diff --git a/test_config/test_config.go b/test_config/test_config.go
index 72ba8fcb..96969589 100644
--- a/test_config/test_config.go
+++ b/test_config/test_config.go
@@ -19,13 +19,11 @@ package test_config
import (
"errors"
"fmt"
- . "github.com/onsi/gomega"
"github.com/sirupsen/logrus"
"github.com/spf13/viper"
"github.com/vulcanize/vulcanizedb/pkg/config"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
"os"
)
@@ -88,22 +86,15 @@ func NewTestDB(node core.Node) *postgres.DB {
func CleanTestDB(db *postgres.DB) {
db.MustExec("DELETE FROM addresses")
db.MustExec("DELETE FROM blocks")
- db.MustExec("DELETE FROM eth_blocks")
db.MustExec("DELETE FROM checked_headers")
- // can't delete from eth_nodes since this function is called after the required eth_node is persisted
- db.MustExec("DELETE FROM full_sync_logs")
- db.MustExec("DELETE FROM full_sync_receipts")
- db.MustExec("DELETE FROM full_sync_transactions")
+ // can't delete from nodes since this function is called after the required node is persisted
db.MustExec("DELETE FROM goose_db_version")
db.MustExec("DELETE FROM header_sync_logs")
db.MustExec("DELETE FROM header_sync_receipts")
db.MustExec("DELETE FROM header_sync_transactions")
db.MustExec("DELETE FROM headers")
- db.MustExec("DELETE FROM log_filters")
db.MustExec("DELETE FROM queued_storage")
db.MustExec("DELETE FROM storage_diff")
- db.MustExec("DELETE FROM watched_contracts")
- db.MustExec("DELETE FROM watched_logs")
}
func CleanCheckedHeadersTable(db *postgres.DB, columnNames []string) {
@@ -116,15 +107,8 @@ func CleanCheckedHeadersTable(db *postgres.DB, columnNames []string) {
func NewTestNode() core.Node {
return core.Node{
GenesisBlock: "GENESIS",
- NetworkID: 1,
+ NetworkID: "1",
ID: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845",
ClientName: "Geth/v1.7.2-stable-1db4ecdc/darwin-amd64/go1.9",
}
}
-
-func NewTestBlock(blockNumber int64, repository repositories.BlockRepository) int64 {
- blockID, err := repository.CreateOrUpdateBlock(core.Block{Number: blockNumber})
- Expect(err).NotTo(HaveOccurred())
-
- return blockID
-}
diff --git a/utils/utils.go b/utils/utils.go
index 87608f94..2e852790 100644
--- a/utils/utils.go
+++ b/utils/utils.go
@@ -27,7 +27,7 @@ import (
"github.com/vulcanize/vulcanizedb/pkg/config"
"github.com/vulcanize/vulcanizedb/pkg/eth"
"github.com/vulcanize/vulcanizedb/pkg/eth/core"
- "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres"
+ "github.com/vulcanize/vulcanizedb/pkg/postgres"
)
func LoadPostgres(database config.Database, node core.Node) postgres.DB {