From 558599dd324737d1e4f3d949bcb297ce75461ea9 Mon Sep 17 00:00:00 2001
From: Ian Norden
Date: Mon, 31 Aug 2020 10:42:01 -0500
Subject: [PATCH 01/12] remove btc stuff

---
 .travis.yml | 27 -
 cmd/resync.go | 110 ---
 db/migrations/00011_create_btc_schema.sql | 5 -
 ...=> 00011_create_postgraphile_comments.sql} | 0
 .../00012_create_btc_header_cids_table.sql | 17 -
 ...0013_create_btc_transaction_cids_table.sql | 14 -
 .../00014_create_btc_tx_outputs_table.sql | 15 -
 .../00015_create_btc_tx_inputs_table.sql | 14 -
 documentation/architecture.md | 132 ----
 documentation/resync.md | 70 --
 documentation/watcher.md | 16 -
 environments/superNodeBTC.toml | 48 --
 pkg/btc/btc_suite_test.go | 35 -
 pkg/btc/cid_retriever.go | 302 --------
 pkg/btc/cleaner.go | 193 -----
 pkg/btc/cleaner_test.go | 354 ---------
 pkg/btc/converter.go | 102 ---
 pkg/btc/converter_test.go | 43 --
 pkg/btc/filterer.go | 159 ----
 pkg/btc/http_streamer.go | 104 ---
 pkg/btc/indexer.go | 132 ----
 pkg/btc/indexer_test.go | 94 ---
 pkg/btc/ipld_fetcher.go | 107 ---
 pkg/btc/mocks/converter.go | 64 --
 pkg/btc/mocks/indexer.go | 40 -
 pkg/btc/mocks/publisher.go | 65 --
 pkg/btc/mocks/test_data.go | 709 ------------------
 pkg/btc/models.go | 82 --
 pkg/btc/payload_fetcher.go | 76 --
 pkg/btc/publisher.go | 120 ---
 pkg/btc/publisher_test.go | 120 ---
 pkg/btc/streamer.go | 86 ---
 pkg/btc/subscription_config.go | 115 ---
 pkg/btc/test_helpers.go | 43 --
 pkg/btc/types.go | 76 --
 pkg/config/config_suite_test.go | 29 -
 pkg/config/config_test.go | 48 --
 pkg/config/database.go | 78 --
 pkg/eth/cleaner.go | 356 ---------
 pkg/eth/cleaner_test.go | 698 -----------------
 pkg/eth/converter.go | 155 ----
 pkg/eth/converter_test.go | 54 --
 pkg/eth/indexer.go | 206 -----
 pkg/eth/indexer_test.go | 137 ----
 pkg/eth/mocks/test_data.go | 582 --------------
 pkg/eth/models.go | 126 ----
 pkg/eth/payload_fetcher.go | 88 ---
 pkg/eth/payload_fetcher_test.go | 65 --
 pkg/eth/publisher.go | 228 ------
 pkg/eth/publisher_test.go | 233 ------
 pkg/eth/reward.go | 76 --
 pkg/eth/streamer.go | 72 --
 pkg/eth/streamer_test.go | 34 -
 pkg/eth/types.go | 112 ---
 pkg/historical/config.go | 135 ----
 pkg/historical/historical_suite_test.go | 35 -
 pkg/historical/service.go | 196 -----
 pkg/historical/service_test.go | 180 -----
 pkg/ipfs/ipld/btc_header.go | 183 -----
 pkg/ipfs/ipld/btc_parser.go | 74 --
 pkg/ipfs/ipld/btc_tx.go | 258 -------
 pkg/ipfs/ipld/btc_tx_trie.go | 110 ---
 pkg/ipfs/ipld/eth_account.go | 175 -----
 pkg/ipfs/ipld/eth_header.go | 256 -------
 pkg/ipfs/ipld/eth_parser.go | 97 ---
 pkg/ipfs/ipld/eth_receipt.go | 199 -----
 pkg/ipfs/ipld/eth_receipt_trie.go | 152 ----
 pkg/ipfs/ipld/eth_state.go | 114 ---
 pkg/ipfs/ipld/eth_storage.go | 100 ---
 pkg/ipfs/ipld/eth_tx.go | 215 ------
 pkg/ipfs/ipld/eth_tx_trie.go | 152 ----
 pkg/ipfs/ipld/shared.go | 151 ----
 pkg/ipfs/ipld/trie_node.go | 444 -----------
 pkg/ipfs/models.go | 22 -
 pkg/node/node.go | 25 -
 pkg/postgres/errors.go | 37 -
 pkg/postgres/postgres.go | 76 --
 pkg/postgres/postgres_suite_test.go | 36 -
 pkg/postgres/postgres_test.go | 104 ---
 pkg/resync/config.go | 128 ----
 pkg/resync/service.go | 174 -----
 pkg/shared/data_type.go | 144 ----
 pkg/shared/intefaces.go | 83 --
 pkg/shared/types.go | 41 -
 pkg/watch/service_test.go | 72 --
 pkg/watch/watch_suite_test.go | 35 -
 temp_rsa.enc | Bin 1856 -> 0 bytes
 87 files changed, 11269 deletions(-)
 delete mode 100644 .travis.yml
 delete mode 100644 cmd/resync.go
 delete mode 100644 db/migrations/00011_create_btc_schema.sql
 rename db/migrations/{00016_create_postgraphile_comments.sql => 00011_create_postgraphile_comments.sql} (100%)
 delete mode 100644 db/migrations/00012_create_btc_header_cids_table.sql
 delete mode 100644 db/migrations/00013_create_btc_transaction_cids_table.sql
 delete mode 100644 db/migrations/00014_create_btc_tx_outputs_table.sql
 delete mode 100644 db/migrations/00015_create_btc_tx_inputs_table.sql
 delete mode 100644 documentation/architecture.md
 delete mode 100644 documentation/resync.md
 delete mode 100644 documentation/watcher.md
 delete mode 100644 environments/superNodeBTC.toml
 delete mode 100644 pkg/btc/btc_suite_test.go
 delete mode 100644 pkg/btc/cid_retriever.go
 delete mode 100644 pkg/btc/cleaner.go
 delete mode 100644 pkg/btc/cleaner_test.go
 delete mode 100644 pkg/btc/converter.go
 delete mode 100644 pkg/btc/converter_test.go
 delete mode 100644 pkg/btc/filterer.go
 delete mode 100644 pkg/btc/http_streamer.go
 delete mode 100644 pkg/btc/indexer.go
 delete mode 100644 pkg/btc/indexer_test.go
 delete mode 100644 pkg/btc/ipld_fetcher.go
 delete mode 100644 pkg/btc/mocks/converter.go
 delete mode 100644 pkg/btc/mocks/indexer.go
 delete mode 100644 pkg/btc/mocks/publisher.go
 delete mode 100644 pkg/btc/mocks/test_data.go
 delete mode 100644 pkg/btc/models.go
 delete mode 100644 pkg/btc/payload_fetcher.go
 delete mode 100644 pkg/btc/publisher.go
 delete mode 100644 pkg/btc/publisher_test.go
 delete mode 100644 pkg/btc/streamer.go
 delete mode 100644 pkg/btc/subscription_config.go
 delete mode 100644 pkg/btc/test_helpers.go
 delete mode 100644 pkg/btc/types.go
 delete mode 100644 pkg/config/config_suite_test.go
 delete mode 100644 pkg/config/config_test.go
 delete mode 100644 pkg/config/database.go
 delete mode 100644 pkg/eth/cleaner.go
 delete mode 100644 pkg/eth/cleaner_test.go
 delete mode 100644 pkg/eth/converter.go
 delete mode 100644 pkg/eth/converter_test.go
 delete mode 100644 pkg/eth/indexer.go
 delete mode 100644 pkg/eth/indexer_test.go
 delete mode 100644 pkg/eth/mocks/test_data.go
 delete mode 100644 pkg/eth/models.go
 delete mode 100644 pkg/eth/payload_fetcher.go
 delete mode 100644 pkg/eth/payload_fetcher_test.go
 delete mode 100644 pkg/eth/publisher.go
 delete mode 100644 pkg/eth/publisher_test.go
 delete mode 100644 pkg/eth/reward.go
 delete mode 100644 pkg/eth/streamer.go
 delete mode 100644 pkg/eth/streamer_test.go
 delete mode 100644 pkg/eth/types.go
 delete mode 100644 pkg/historical/config.go
 delete mode 100644 pkg/historical/historical_suite_test.go
 delete mode 100644 pkg/historical/service.go
 delete mode 100644 pkg/historical/service_test.go
 delete mode 100644 pkg/ipfs/ipld/btc_header.go
 delete mode 100644 pkg/ipfs/ipld/btc_parser.go
 delete mode 100644 pkg/ipfs/ipld/btc_tx.go
 delete mode 100644 pkg/ipfs/ipld/btc_tx_trie.go
 delete mode 100644 pkg/ipfs/ipld/eth_account.go
 delete mode 100644 pkg/ipfs/ipld/eth_header.go
 delete mode 100644 pkg/ipfs/ipld/eth_parser.go
 delete mode 100644 pkg/ipfs/ipld/eth_receipt.go
 delete mode 100644 pkg/ipfs/ipld/eth_receipt_trie.go
 delete mode 100644 pkg/ipfs/ipld/eth_state.go
 delete mode 100644 pkg/ipfs/ipld/eth_storage.go
 delete mode 100644 pkg/ipfs/ipld/eth_tx.go
 delete mode 100644 pkg/ipfs/ipld/eth_tx_trie.go
 delete mode 100644 pkg/ipfs/ipld/shared.go
 delete mode 100644 pkg/ipfs/ipld/trie_node.go
 delete mode 100644 pkg/ipfs/models.go
 delete mode 100644 pkg/node/node.go
 delete mode 100644 pkg/postgres/errors.go
 delete mode 100644 pkg/postgres/postgres.go
 delete mode 100644 pkg/postgres/postgres_suite_test.go
 delete mode 100644 pkg/postgres/postgres_test.go
 delete mode 100644 pkg/resync/config.go
 delete mode 100644 pkg/resync/service.go
 delete mode 100644 pkg/shared/data_type.go
 delete mode 100644 pkg/shared/intefaces.go
 delete mode 100644 pkg/shared/types.go
 delete mode 100644 pkg/watch/service_test.go
 delete mode 100644 pkg/watch/watch_suite_test.go
 delete mode 100644 temp_rsa.enc

diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 86af4c95..00000000
--- a/.travis.yml
+++ /dev/null
@@ -1,27 +0,0 @@
-dist: trusty
-language: go
-go:
-- 1.12
-services:
-- postgresql
-addons:
-  ssh_known_hosts: arch1.vdb.to
-  postgresql: '11.2'
-go_import_path: github.com/vulcanize/ipfs-blockchain-watcher
-before_install:
-- openssl aes-256-cbc -K $encrypted_e1db309e8776_key -iv $encrypted_e1db309e8776_iv
-  -in temp_rsa.enc -out temp_rsa -d
-- eval "$(ssh-agent -s)"
-- chmod 600 temp_rsa
-- ssh-add temp_rsa
-- ssh -4 -fNL 8545:localhost:8545 geth@arch1.vdb.to
-- make installtools
-- bash ./scripts/install-postgres-11.sh
-- curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add -
-- echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list
-- sudo apt-get update && sudo apt-get install yarn
-script:
-- env GO111MODULE=on make test
-- env GO111MODULE=on make integrationtest
-notifications:
-  email: false
diff --git a/cmd/resync.go b/cmd/resync.go
deleted file mode 100644
index 945707f6..00000000
--- a/cmd/resync.go
+++ /dev/null
@@ -1,110 +0,0 @@
-// Copyright © 2020 Vulcanize, Inc
-//
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-//
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see .
- -package cmd - -import ( - log "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - "github.com/spf13/viper" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/resync" - v "github.com/vulcanize/ipfs-blockchain-watcher/version" -) - -// resyncCmd represents the resync command -var resyncCmd = &cobra.Command{ - Use: "resync", - Short: "Resync historical data", - Long: `Use this command to fill in sections of missing data in the ipfs-blockchain-watcher database`, - Run: func(cmd *cobra.Command, args []string) { - subCommand = cmd.CalledAs() - logWithCommand = *log.WithField("SubCommand", subCommand) - rsyncCmdCommand() - }, -} - -func rsyncCmdCommand() { - logWithCommand.Infof("running ipfs-blockchain-watcher version: %s", v.VersionWithMeta) - logWithCommand.Debug("loading resync configuration variables") - rConfig, err := resync.NewConfig() - if err != nil { - logWithCommand.Fatal(err) - } - logWithCommand.Infof("resync config: %+v", rConfig) - logWithCommand.Debug("initializing new resync service") - rService, err := resync.NewResyncService(rConfig) - if err != nil { - logWithCommand.Fatal(err) - } - logWithCommand.Info("starting up resync process") - if err := rService.Resync(); err != nil { - logWithCommand.Fatal(err) - } - logWithCommand.Infof("%s %s resync finished", rConfig.Chain.String(), rConfig.ResyncType.String()) -} - -func init() { - rootCmd.AddCommand(resyncCmd) - - // flags - resyncCmd.PersistentFlags().String("resync-chain", "", "which chain to support, options are currently Ethereum or Bitcoin.") - resyncCmd.PersistentFlags().String("resync-type", "", "which type of data to resync") - resyncCmd.PersistentFlags().Int("resync-start", 0, "block height to start resync") - resyncCmd.PersistentFlags().Int("resync-stop", 0, "block height to stop resync") - resyncCmd.PersistentFlags().Int("resync-batch-size", 0, "data fetching batch size") - resyncCmd.PersistentFlags().Int("resync-batch-number", 0, "how many goroutines to fetch data concurrently") - resyncCmd.PersistentFlags().Bool("resync-clear-old-cache", false, "if true, clear out old data of the provided type within the resync range before resyncing") - resyncCmd.PersistentFlags().Bool("resync-reset-validation", false, "if true, reset times_validated to 0") - resyncCmd.PersistentFlags().Int("resync-timeout", 15, "timeout used for resync http requests") - - resyncCmd.PersistentFlags().String("btc-http-path", "", "http url for bitcoin node") - resyncCmd.PersistentFlags().String("btc-password", "", "password for btc node") - resyncCmd.PersistentFlags().String("btc-username", "", "username for btc node") - resyncCmd.PersistentFlags().String("btc-node-id", "", "btc node id") - resyncCmd.PersistentFlags().String("btc-client-name", "", "btc client name") - resyncCmd.PersistentFlags().String("btc-genesis-block", "", "btc genesis block hash") - resyncCmd.PersistentFlags().String("btc-network-id", "", "btc network id") - - resyncCmd.PersistentFlags().String("eth-http-path", "", "http url for ethereum node") - resyncCmd.PersistentFlags().String("eth-node-id", "", "eth node id") - resyncCmd.PersistentFlags().String("eth-client-name", "", "eth client name") - resyncCmd.PersistentFlags().String("eth-genesis-block", "", "eth genesis block hash") - resyncCmd.PersistentFlags().String("eth-network-id", "", "eth network id") - - // and their bindings - viper.BindPFlag("resync.chain", resyncCmd.PersistentFlags().Lookup("resync-chain")) - viper.BindPFlag("resync.type", resyncCmd.PersistentFlags().Lookup("resync-type")) - 
viper.BindPFlag("resync.start", resyncCmd.PersistentFlags().Lookup("resync-start")) - viper.BindPFlag("resync.stop", resyncCmd.PersistentFlags().Lookup("resync-stop")) - viper.BindPFlag("resync.batchSize", resyncCmd.PersistentFlags().Lookup("resync-batch-size")) - viper.BindPFlag("resync.batchNumber", resyncCmd.PersistentFlags().Lookup("resync-batch-number")) - viper.BindPFlag("resync.clearOldCache", resyncCmd.PersistentFlags().Lookup("resync-clear-old-cache")) - viper.BindPFlag("resync.resetValidation", resyncCmd.PersistentFlags().Lookup("resync-reset-validation")) - viper.BindPFlag("resync.timeout", resyncCmd.PersistentFlags().Lookup("resync-timeout")) - - viper.BindPFlag("bitcoin.httpPath", resyncCmd.PersistentFlags().Lookup("btc-http-path")) - viper.BindPFlag("bitcoin.pass", resyncCmd.PersistentFlags().Lookup("btc-password")) - viper.BindPFlag("bitcoin.user", resyncCmd.PersistentFlags().Lookup("btc-username")) - viper.BindPFlag("bitcoin.nodeID", resyncCmd.PersistentFlags().Lookup("btc-node-id")) - viper.BindPFlag("bitcoin.clientName", resyncCmd.PersistentFlags().Lookup("btc-client-name")) - viper.BindPFlag("bitcoin.genesisBlock", resyncCmd.PersistentFlags().Lookup("btc-genesis-block")) - viper.BindPFlag("bitcoin.networkID", resyncCmd.PersistentFlags().Lookup("btc-network-id")) - - viper.BindPFlag("ethereum.httpPath", resyncCmd.PersistentFlags().Lookup("eth-http-path")) - viper.BindPFlag("ethereum.nodeID", resyncCmd.PersistentFlags().Lookup("eth-node-id")) - viper.BindPFlag("ethereum.clientName", resyncCmd.PersistentFlags().Lookup("eth-client-name")) - viper.BindPFlag("ethereum.genesisBlock", resyncCmd.PersistentFlags().Lookup("eth-genesis-block")) - viper.BindPFlag("ethereum.networkID", resyncCmd.PersistentFlags().Lookup("eth-network-id")) -} diff --git a/db/migrations/00011_create_btc_schema.sql b/db/migrations/00011_create_btc_schema.sql deleted file mode 100644 index e95dd926..00000000 --- a/db/migrations/00011_create_btc_schema.sql +++ /dev/null @@ -1,5 +0,0 @@ --- +goose Up -CREATE SCHEMA btc; - --- +goose Down -DROP SCHEMA btc; \ No newline at end of file diff --git a/db/migrations/00016_create_postgraphile_comments.sql b/db/migrations/00011_create_postgraphile_comments.sql similarity index 100% rename from db/migrations/00016_create_postgraphile_comments.sql rename to db/migrations/00011_create_postgraphile_comments.sql diff --git a/db/migrations/00012_create_btc_header_cids_table.sql b/db/migrations/00012_create_btc_header_cids_table.sql deleted file mode 100644 index fcdb075d..00000000 --- a/db/migrations/00012_create_btc_header_cids_table.sql +++ /dev/null @@ -1,17 +0,0 @@ --- +goose Up -CREATE TABLE btc.header_cids ( - id SERIAL PRIMARY KEY, - block_number BIGINT NOT NULL, - block_hash VARCHAR(66) NOT NULL, - parent_hash VARCHAR(66) NOT NULL, - cid TEXT NOT NULL, - mh_key TEXT NOT NULL REFERENCES public.blocks (key) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, - timestamp NUMERIC NOT NULL, - bits BIGINT NOT NULL, - node_id INTEGER NOT NULL REFERENCES nodes (id) ON DELETE CASCADE, - times_validated INTEGER NOT NULL DEFAULT 1, - UNIQUE (block_number, block_hash) -); - --- +goose Down -DROP TABLE btc.header_cids; \ No newline at end of file diff --git a/db/migrations/00013_create_btc_transaction_cids_table.sql b/db/migrations/00013_create_btc_transaction_cids_table.sql deleted file mode 100644 index aabf8af9..00000000 --- a/db/migrations/00013_create_btc_transaction_cids_table.sql +++ /dev/null @@ -1,14 +0,0 @@ --- +goose Up -CREATE TABLE btc.transaction_cids ( - id SERIAL 
PRIMARY KEY, - header_id INTEGER NOT NULL REFERENCES btc.header_cids (id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, - index INTEGER NOT NULL, - tx_hash VARCHAR(66) NOT NULL UNIQUE, - cid TEXT NOT NULL, - mh_key TEXT NOT NULL REFERENCES public.blocks (key) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, - segwit BOOL NOT NULL, - witness_hash VARCHAR(66) -); - --- +goose Down -DROP TABLE btc.transaction_cids; \ No newline at end of file diff --git a/db/migrations/00014_create_btc_tx_outputs_table.sql b/db/migrations/00014_create_btc_tx_outputs_table.sql deleted file mode 100644 index d7d340ba..00000000 --- a/db/migrations/00014_create_btc_tx_outputs_table.sql +++ /dev/null @@ -1,15 +0,0 @@ --- +goose Up -CREATE TABLE btc.tx_outputs ( - id SERIAL PRIMARY KEY, - tx_id INTEGER NOT NULL REFERENCES btc.transaction_cids (id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, - index INTEGER NOT NULL, - value BIGINT NOT NULL, - pk_script BYTEA NOT NULL, - script_class INTEGER NOT NULL, - addresses VARCHAR(66)[], - required_sigs INTEGER NOT NULL, - UNIQUE (tx_id, index) -); - --- +goose Down -DROP TABLE btc.tx_outputs; \ No newline at end of file diff --git a/db/migrations/00015_create_btc_tx_inputs_table.sql b/db/migrations/00015_create_btc_tx_inputs_table.sql deleted file mode 100644 index 0a4fd17d..00000000 --- a/db/migrations/00015_create_btc_tx_inputs_table.sql +++ /dev/null @@ -1,14 +0,0 @@ --- +goose Up -CREATE TABLE btc.tx_inputs ( - id SERIAL PRIMARY KEY, - tx_id INTEGER NOT NULL REFERENCES btc.transaction_cids (id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, - index INTEGER NOT NULL, - witness VARCHAR[], - sig_script BYTEA NOT NULL, - outpoint_tx_hash VARCHAR(66) NOT NULL, - outpoint_index NUMERIC NOT NULL, - UNIQUE (tx_id, index) -); - --- +goose Down -DROP TABLE btc.tx_inputs; \ No newline at end of file diff --git a/documentation/architecture.md b/documentation/architecture.md deleted file mode 100644 index dd18e100..00000000 --- a/documentation/architecture.md +++ /dev/null @@ -1,132 +0,0 @@ -# ipfs-blockchain-watcher architecture -1. [Processes](#processes) -1. [Command](#command) -1. [Configuration](#config) -1. [Database](#database) -1. [APIs](#apis) -1. [Resync](#resync) -1. [IPFS Considerations](#ipfs-considerations) - -## Processes -ipfs-blockchain-watcher is a [service](../pkg/watch/service.go#L61) comprised of the following interfaces: - -* [Payload Fetcher](../pkg/shared/interfaces.go#L29): Fetches raw chain data from a half-duplex endpoint (HTTP/IPC), used for historical data fetching. ([BTC](../pkg/btc/payload_fetcher.go), [ETH](../pkg/eth/payload_fetcher.go)). -* [Payload Streamer](../pkg/shared/interfaces.go#L24): Streams raw chain data from a full-duplex endpoint (WebSocket/IPC), used for syncing data at the head of the chain in real-time. ([BTC](../pkg/btc/http_streamer.go), [ETH](../pkg/eth/streamer.go)). -* [Payload Converter](../pkg/shared/interfaces.go#L34): Converters raw chain data to an intermediary form prepared for IPFS publishing. ([BTC](../pkg/btc/converter.go), [ETH](../pkg/eth/converter.go)). -* [IPLD Publisher](../pkg/shared/interfaces.go#L39): Publishes the converted data to IPFS, returning their CIDs and associated metadata for indexing. ([BTC](../pkg/btc/publisher.go), [ETH](../pkg/eth/publisher.go)). -* [CID Indexer](../pkg/shared/interfaces.go#L44): Indexes CIDs in Postgres with their associated metadata. This metadata is chain specific and selected based on utility. ([BTC](../pkg/btc/indexer.go), [ETH](../pkg/eth/indexer.go)). 
-* [CID Retriever](../pkg/shared/interfaces.go#L54): Retrieves CIDs from Postgres by searching against their associated metadata, is used to lookup data to serve API requests/subscriptions. ([BTC](../pkg/btc/retriever.go), [ETH](../pkg/eth/retriever.go)). -* [IPLD Fetcher](../pkg/shared/interfaces.go#L62): Fetches the IPLDs needed to service API requests/subscriptions from IPFS using retrieved CIDS; can route through a IPFS block-exchange to search for objects that are not directly available. ([BTC](../pkg/btc/ipld_fetcher.go), [ETH](../pkg/eth/ipld_fetcher.go)) -* [Response Filterer](../pkg/shared/interfaces.go#L49): Filters converted data payloads served to API subscriptions; filters according to the subscriber provided parameters. ([BTC](../pkg/btc/filterer.go), [ETH](../pkg/eth/filterer.go)). -* [API](https://github.com/ethereum/go-ethereum/blob/master/rpc/types.go#L31): Expose RPC methods for clients to interface with the data. Chain-specific APIs should aim to recapitulate as much of the native API as possible. ([VDB](../pkg/api.go), [ETH](../pkg/eth/api.go)). - - -Appropriating the service for a new chain is done by creating underlying types to satisfy these interfaces for -the specifics of that chain. - -The service uses these interfaces to operate in any combination of three modes: `sync`, `serve`, and `backfill`. -* Sync: Streams raw chain data at the head, converts and publishes it to IPFS, and indexes the resulting set of CIDs in Postgres with useful metadata. -* BackFill: Automatically searches for and detects gaps in the DB; fetches, converts, publishes, and indexes the data to fill these gaps. -* Serve: Opens up IPC, HTTP, and WebSocket servers on top of the ipfs-blockchain-watcher DB and any concurrent sync and/or backfill processes. - - -These three modes are all operated through a single vulcanizeDB command: `watch` - -## Command - -Usage: `./ipfs-blockchain-watcher watch --config={config.toml}` - -Configuration can also be done through CLI options and/or environmental variables. -CLI options can be found using `./ipfs-blockchain-watcher watch --help`. - -## Config - -Below is the set of universal config parameters for the ipfs-blockchain-watcher command, in .toml form, with the respective environmental variables commented to the side. -This set of parameters needs to be set no matter the chain type. - -```toml -[database] - name = "vulcanize_public" # $DATABASE_NAME - hostname = "localhost" # $DATABASE_HOSTNAME - port = 5432 # $DATABASE_PORT - user = "vdbm" # $DATABASE_USER - password = "" # $DATABASE_PASSWORD - -[watcher] - chain = "bitcoin" # $SUPERNODE_CHAIN - server = true # $SUPERNODE_SERVER - ipcPath = "~/.vulcanize/vulcanize.ipc" # $SUPERNODE_IPC_PATH - wsPath = "127.0.0.1:8082" # $SUPERNODE_WS_PATH - httpPath = "127.0.0.1:8083" # $SUPERNODE_HTTP_PATH - sync = true # $SUPERNODE_SYNC - workers = 1 # $SUPERNODE_WORKERS - backFill = true # $SUPERNODE_BACKFILL - frequency = 45 # $SUPERNODE_FREQUENCY - batchSize = 1 # $SUPERNODE_BATCH_SIZE - batchNumber = 50 # $SUPERNODE_BATCH_NUMBER - timeout = 300 # $HTTP_TIMEOUT - validationLevel = 1 # $SUPERNODE_VALIDATION_LEVEL -``` - -Additional parameters need to be set depending on the specific chain. 
- -For Bitcoin: - -```toml -[bitcoin] - wsPath = "127.0.0.1:8332" # $BTC_WS_PATH - httpPath = "127.0.0.1:8332" # $BTC_HTTP_PATH - pass = "password" # $BTC_NODE_PASSWORD - user = "username" # $BTC_NODE_USER - nodeID = "ocd0" # $BTC_NODE_ID - clientName = "Omnicore" # $BTC_CLIENT_NAME - genesisBlock = "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f" # $BTC_GENESIS_BLOCK - networkID = "0xD9B4BEF9" # $BTC_NETWORK_ID -``` - -For Ethereum: - -```toml -[ethereum] - wsPath = "127.0.0.1:8546" # $ETH_WS_PATH - httpPath = "127.0.0.1:8545" # $ETH_HTTP_PATH - nodeID = "arch1" # $ETH_NODE_ID - clientName = "Geth" # $ETH_CLIENT_NAME - genesisBlock = "0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3" # $ETH_GENESIS_BLOCK - networkID = "1" # $ETH_NETWORK_ID -``` - -## Database - -Currently, ipfs-blockchain-watcher persists all data to a single Postgres database. The migrations for this DB can be found [here](../db/migrations). -Chain-specific data is populated under a chain-specific schema (e.g. `eth` and `btc`) while shared data- such as the IPFS blocks table- is populated under the `public` schema. -Subsequent watchers which act on the raw chain data should build and populate their own schemas or separate databases entirely. - -In the future, the database architecture will be moving to a foreign table based architecture wherein a single db is used for shared data while each watcher uses -its own database and accesses and acts on the shared data through foreign tables. Isolating watchers to their own databases will prevent complications and -conflicts between watcher db migrations. - - -## APIs - -ipfs-blockchain-watcher provides mutliple types of APIs by which to interface with its data. -More detailed information on the APIs can be found [here](apis.md). - -## Resync - -A separate command `resync` is available for directing the resyncing of data within specified ranges. -This is useful if there is a need to re-validate a range of data using a new source or clean out bad/deprecated data. -More detailed information on this command can be found [here](resync.md). - -## IPFS Considerations - -Currently the IPLD Publisher and Fetcher can either use internalized IPFS processes which interface with a local IPFS repository, or can interface -directly with the backing Postgres database. -Both these options circumvent the need to run a full IPFS daemon with a [go-ipld-eth](https://github.com/ipfs/go-ipld-eth) or [go-ipld-btc](https://github.com/ipld/go-ipld-btc) plugin. -The former approach can lead to issues with lock-contention on the IPFS repo if another IPFS process is configured and running at the same $IPFS_PATH, it also necessitates the need for -a locally configured IPFS repository. The later bypasses the need for a configured IPFS repository/$IPFS_PATH and allows all Postgres write operations at a given block height -to occur in a single transaction, the only disadvantage is that by avoiding moving through an IPFS node intermediary the direct ability to reach out to the block -exchange for data not found locally is lost. - -Once go-ipld-eth and go-ipld-btc have been updated to work with a modern version of PG-IPFS, an additional option will be provided to direct -all publishing and fetching of IPLD objects through a remote IPFS daemon. 
\ No newline at end of file diff --git a/documentation/resync.md b/documentation/resync.md deleted file mode 100644 index b0de3c2e..00000000 --- a/documentation/resync.md +++ /dev/null @@ -1,70 +0,0 @@ -## ipfs-blockchain-watcher resync -The `resync` command is made available for directing the resyncing of ipfs-blockchain-watcherdata within specified ranges. -It also contains a utility for cleaning out old data, and resetting the validation level of data. - -### Rational - -Manual resyncing of data can be used to re-validate data within specific ranges using a new source. - -Option to remove data may be needed for bad/deprecated data or to prepare for breaking changes to the db schemas. - -Resetting the validation level of data is useful for designating ranges of data for resyncing by an ongoing ipfs-blockchain-watcher -backfill process. - -### Command - -Usage: `./ipfs-blockchain-watcher resync --config={config.toml}` - -Configuration can also be done through CLI options and/or environmental variables. -CLI options can be found using `./ipfs-blockchain-watcher resync --help`. - -### Config - -Below is the set of universal config parameters for the resync command, in .toml form, with the respective environmental variables commented to the side. -This set of parameters needs to be set no matter the chain type. - -```toml -[database] - name = "vulcanize_public" # $DATABASE_NAME - hostname = "localhost" # $DATABASE_HOSTNAME - port = 5432 # $DATABASE_PORT - user = "vdbm" # $DATABASE_USER - password = "" # $DATABASE_PASSWORD - -[resync] - chain = "ethereum" # $RESYNC_CHAIN - type = "state" # $RESYNC_TYPE - start = 0 # $RESYNC_START - stop = 1000 # $RESYNC_STOP - batchSize = 10 # $RESYNC_BATCH_SIZE - batchNumber = 100 # $RESYNC_BATCH_NUMBER - timeout = 300 # $HTTP_TIMEOUT - clearOldCache = true # $RESYNC_CLEAR_OLD_CACHE - resetValidation = true # $RESYNC_RESET_VALIDATION -``` - -Additional parameters need to be set depending on the specific chain. 
- -For Bitcoin: - -```toml -[bitcoin] - httpPath = "127.0.0.1:8332" # $BTC_HTTP_PATH - pass = "password" # $BTC_NODE_PASSWORD - user = "username" # $BTC_NODE_USER - nodeID = "ocd0" # $BTC_NODE_ID - clientName = "Omnicore" # $BTC_CLIENT_NAME - genesisBlock = "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f" # $BTC_GENESIS_BLOCK - networkID = "0xD9B4BEF9" # $BTC_NETWORK_ID -``` - -For Ethereum: - -```toml -[ethereum] - httpPath = "127.0.0.1:8545" # $ETH_HTTP_PATH - nodeID = "arch1" # $ETH_NODE_ID - clientName = "Geth" # $ETH_CLIENT_NAME - genesisBlock = "0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3" # $ETH_GENESIS_BLOCK - networkID = "1" # $ETH_NETWORK_ID -``` diff --git a/documentation/watcher.md b/documentation/watcher.md deleted file mode 100644 index c7748f6d..00000000 --- a/documentation/watcher.md +++ /dev/null @@ -1,16 +0,0 @@ -These are the components of a VulcanizeDB Watcher: -* Data Fetcher/Streamer sources: - * go-ethereum - * bitcoind - * btcd - * IPFS -* Transformers contain: - * converter - * publisher - * indexer -* Endpoints contain: - * api - * backend - * filterer - * retriever - * ipld_server \ No newline at end of file diff --git a/environments/superNodeBTC.toml b/environments/superNodeBTC.toml deleted file mode 100644 index fa72d41d..00000000 --- a/environments/superNodeBTC.toml +++ /dev/null @@ -1,48 +0,0 @@ -[database] - name = "vulcanize_public" # $DATABASE_NAME - hostname = "localhost" # $DATABASE_HOSTNAME - port = 5432 # $DATABASE_PORT - user = "vdbm" # $DATABASE_USER - password = "" # $DATABASE_PASSWORD - - [database.sync] - maxIdle = 1 - [database.backFill] - maxIdle = 5 - -[log] - level = "debug" # $LOGRUS_LEVEL - -[resync] - chain = "bitcoin" # $RESYNC_CHAIN - type = "full" # $RESYNC_TYPE - start = 0 # $RESYNC_START - stop = 0 # $RESYNC_STOP - batchSize = 5 # $RESYNC_BATCH_SIZE - batchNumber = 5 # $RESYNC_BATCH_NUMBER - clearOldCache = false # $RESYNC_CLEAR_OLD_CACHE - resetValidation = true # $RESYNC_RESET_VALIDATION - -[watcher] - chain = "bitcoin" # $SUPERNODE_CHAIN - server = true # $SUPERNODE_SERVER - ipcPath = "~/.vulcanize/vulcanize.ipc" # $SUPERNODE_IPC_PATH - wsPath = "127.0.0.1:8082" # $SUPERNODE_WS_PATH - httpPath = "127.0.0.1:8083" # $SUPERNODE_HTTP_PATH - sync = true # $SUPERNODE_SYNC - workers = 1 # $SUPERNODE_WORKERS - backFill = true # $SUPERNODE_BACKFILL - frequency = 45 # $SUPERNODE_FREQUENCY - batchSize = 5 # $SUPERNODE_BATCH_SIZE - batchNumber = 5 # $SUPERNODE_BATCH_NUMBER - validationLevel = 1 # $SUPERNODE_VALIDATION_LEVEL - -[bitcoin] - wsPath = "127.0.0.1:8332" # $BTC_WS_PATH - httpPath = "127.0.0.1:8332" # $BTC_HTTP_PATH - pass = "password" # $BTC_NODE_PASSWORD - user = "username" # $BTC_NODE_USER - nodeID = "ocd0" # $BTC_NODE_ID - clientName = "Omnicore" # $BTC_CLIENT_NAME - genesisBlock = "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f" # $BTC_GENESIS_BLOCK - networkID = "0xD9B4BEF9" # $BTC_NETWORK_ID diff --git a/pkg/btc/btc_suite_test.go b/pkg/btc/btc_suite_test.go deleted file mode 100644 index 76720ce7..00000000 --- a/pkg/btc/btc_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package btc_test - -import ( - "io/ioutil" - "testing" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/sirupsen/logrus" -) - -func TestBTCWatcher(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "BTC IPFS Watcher Suite Test") -} - -var _ = BeforeSuite(func() { - logrus.SetOutput(ioutil.Discard) -}) diff --git a/pkg/btc/cid_retriever.go b/pkg/btc/cid_retriever.go deleted file mode 100644 index 78acd489..00000000 --- a/pkg/btc/cid_retriever.go +++ /dev/null @@ -1,302 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package btc - -import ( - "database/sql" - "fmt" - "math/big" - - "github.com/ethereum/go-ethereum/common" - "github.com/jmoiron/sqlx" - "github.com/lib/pq" - log "github.com/sirupsen/logrus" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" - "github.com/vulcanize/ipfs-blockchain-watcher/utils" -) - -// CIDRetriever satisfies the CIDRetriever interface for bitcoin -type CIDRetriever struct { - db *postgres.DB -} - -// NewCIDRetriever returns a pointer to a new CIDRetriever which supports the CIDRetriever interface -func NewCIDRetriever(db *postgres.DB) *CIDRetriever { - return &CIDRetriever{ - db: db, - } -} - -// RetrieveFirstBlockNumber is used to retrieve the first block number in the db -func (bcr *CIDRetriever) RetrieveFirstBlockNumber() (int64, error) { - var blockNumber int64 - err := bcr.db.Get(&blockNumber, "SELECT block_number FROM btc.header_cids ORDER BY block_number ASC LIMIT 1") - return blockNumber, err -} - -// RetrieveLastBlockNumber is used to retrieve the latest block number in the db -func (bcr *CIDRetriever) RetrieveLastBlockNumber() (int64, error) { - var blockNumber int64 - err := bcr.db.Get(&blockNumber, "SELECT block_number FROM btc.header_cids ORDER BY block_number DESC LIMIT 1 ") - return blockNumber, err -} - -// Retrieve is used to retrieve all of the CIDs which conform to the passed StreamFilters -func (bcr *CIDRetriever) Retrieve(filter shared.SubscriptionSettings, blockNumber int64) ([]shared.CIDsForFetching, bool, error) { - streamFilter, ok := filter.(*SubscriptionSettings) - if !ok { - return nil, true, fmt.Errorf("btc retriever expected filter type %T got %T", &SubscriptionSettings{}, filter) - } - log.Debug("retrieving cids") - - // Begin new db tx - tx, err := bcr.db.Beginx() - if err != nil { - return nil, true, err - } - defer func() { - if p := recover(); p != nil { - shared.Rollback(tx) - panic(p) - } else if err 
!= nil { - shared.Rollback(tx) - } else { - err = tx.Commit() - } - }() - - // Retrieve cached header CIDs - headers, err := bcr.RetrieveHeaderCIDs(tx, blockNumber) - if err != nil { - log.Error("header cid retrieval error") - return nil, true, err - } - cws := make([]shared.CIDsForFetching, len(headers)) - empty := true - for i, header := range headers { - cw := new(CIDWrapper) - cw.BlockNumber = big.NewInt(blockNumber) - if !streamFilter.HeaderFilter.Off { - cw.Header = header - empty = false - } - // Retrieve cached trx CIDs - if !streamFilter.TxFilter.Off { - cw.Transactions, err = bcr.RetrieveTxCIDs(tx, streamFilter.TxFilter, header.ID) - if err != nil { - log.Error("transaction cid retrieval error") - return nil, true, err - } - if len(cw.Transactions) > 0 { - empty = false - } - } - cws[i] = cw - } - - return cws, empty, err -} - -// RetrieveHeaderCIDs retrieves and returns all of the header cids at the provided blockheight -func (bcr *CIDRetriever) RetrieveHeaderCIDs(tx *sqlx.Tx, blockNumber int64) ([]HeaderModel, error) { - log.Debug("retrieving header cids for block ", blockNumber) - headers := make([]HeaderModel, 0) - pgStr := `SELECT * FROM btc.header_cids - WHERE block_number = $1` - return headers, tx.Select(&headers, pgStr, blockNumber) -} - -// RetrieveTxCIDs retrieves and returns all of the trx cids at the provided blockheight that conform to the provided filter parameters -// also returns the ids for the returned transaction cids -func (bcr *CIDRetriever) RetrieveTxCIDs(tx *sqlx.Tx, txFilter TxFilter, headerID int64) ([]TxModel, error) { - log.Debug("retrieving transaction cids for header id ", headerID) - args := make([]interface{}, 0, 3) - results := make([]TxModel, 0) - id := 1 - pgStr := fmt.Sprintf(`SELECT transaction_cids.id, transaction_cids.header_id, - transaction_cids.tx_hash, transaction_cids.cid, transaction_cids.mh_key, - transaction_cids.segwit, transaction_cids.witness_hash, transaction_cids.index - FROM btc.transaction_cids, btc.header_cids, btc.tx_inputs, btc.tx_outputs - WHERE transaction_cids.header_id = header_cids.id - AND tx_inputs.tx_id = transaction_cids.id - AND tx_outputs.tx_id = transaction_cids.id - AND header_cids.id = $%d`, id) - args = append(args, headerID) - id++ - if txFilter.Segwit { - pgStr += ` AND transaction_cids.segwit = true` - } - if txFilter.MultiSig { - pgStr += ` AND tx_outputs.required_sigs > 1` - } - if len(txFilter.WitnessHashes) > 0 { - pgStr += fmt.Sprintf(` AND transaction_cids.witness_hash = ANY($%d::VARCHAR(66)[])`, id) - args = append(args, pq.Array(txFilter.WitnessHashes)) - id++ - } - if len(txFilter.Addresses) > 0 { - pgStr += fmt.Sprintf(` AND tx_outputs.addresses && $%d::VARCHAR(66)[]`, id) - args = append(args, pq.Array(txFilter.Addresses)) - id++ - } - if len(txFilter.Indexes) > 0 { - pgStr += fmt.Sprintf(` AND transaction_cids.index = ANY($%d::INTEGER[])`, id) - args = append(args, pq.Array(txFilter.Indexes)) - id++ - } - if len(txFilter.PkScriptClasses) > 0 { - pgStr += fmt.Sprintf(` AND tx_outputs.script_class = ANY($%d::INTEGER[])`, id) - args = append(args, pq.Array(txFilter.PkScriptClasses)) - } - return results, tx.Select(&results, pgStr, args...) 
-} - -// RetrieveGapsInData is used to find the the block numbers at which we are missing data in the db -func (bcr *CIDRetriever) RetrieveGapsInData(validationLevel int) ([]shared.Gap, error) { - log.Info("searching for gaps in the btc ipfs watcher database") - startingBlock, err := bcr.RetrieveFirstBlockNumber() - if err != nil { - return nil, fmt.Errorf("btc CIDRetriever RetrieveFirstBlockNumber error: %v", err) - } - var initialGap []shared.Gap - if startingBlock != 0 { - stop := uint64(startingBlock - 1) - log.Infof("found gap at the beginning of the btc sync from 0 to %d", stop) - initialGap = []shared.Gap{{ - Start: 0, - Stop: stop, - }} - } - - pgStr := `SELECT header_cids.block_number + 1 AS start, min(fr.block_number) - 1 AS stop FROM btc.header_cids - LEFT JOIN btc.header_cids r on btc.header_cids.block_number = r.block_number - 1 - LEFT JOIN btc.header_cids fr on btc.header_cids.block_number < fr.block_number - WHERE r.block_number is NULL and fr.block_number IS NOT NULL - GROUP BY header_cids.block_number, r.block_number` - results := make([]struct { - Start uint64 `db:"start"` - Stop uint64 `db:"stop"` - }, 0) - if err := bcr.db.Select(&results, pgStr); err != nil && err != sql.ErrNoRows { - return nil, err - } - emptyGaps := make([]shared.Gap, len(results)) - for i, res := range results { - emptyGaps[i] = shared.Gap{ - Start: res.Start, - Stop: res.Stop, - } - } - - // Find sections of blocks where we are below the validation level - // There will be no overlap between these "gaps" and the ones above - pgStr = `SELECT block_number FROM btc.header_cids - WHERE times_validated < $1 - ORDER BY block_number` - var heights []uint64 - if err := bcr.db.Select(&heights, pgStr, validationLevel); err != nil && err != sql.ErrNoRows { - return nil, err - } - return append(append(initialGap, emptyGaps...), utils.MissingHeightsToGaps(heights)...), nil -} - -// RetrieveBlockByHash returns all of the CIDs needed to compose an entire block, for a given block hash -func (bcr *CIDRetriever) RetrieveBlockByHash(blockHash common.Hash) (HeaderModel, []TxModel, error) { - log.Debug("retrieving block cids for block hash ", blockHash.String()) - - // Begin new db tx - tx, err := bcr.db.Beginx() - if err != nil { - return HeaderModel{}, nil, err - } - defer func() { - if p := recover(); p != nil { - shared.Rollback(tx) - panic(p) - } else if err != nil { - shared.Rollback(tx) - } else { - err = tx.Commit() - } - }() - - headerCID, err := bcr.RetrieveHeaderCIDByHash(tx, blockHash) - if err != nil { - log.Error("header cid retrieval error") - return HeaderModel{}, nil, err - } - txCIDs, err := bcr.RetrieveTxCIDsByHeaderID(tx, headerCID.ID) - if err != nil { - log.Error("tx cid retrieval error") - } - return headerCID, txCIDs, err -} - -// RetrieveBlockByNumber returns all of the CIDs needed to compose an entire block, for a given block number -func (bcr *CIDRetriever) RetrieveBlockByNumber(blockNumber int64) (HeaderModel, []TxModel, error) { - log.Debug("retrieving block cids for block number ", blockNumber) - - // Begin new db tx - tx, err := bcr.db.Beginx() - if err != nil { - return HeaderModel{}, nil, err - } - defer func() { - if p := recover(); p != nil { - shared.Rollback(tx) - panic(p) - } else if err != nil { - shared.Rollback(tx) - } else { - err = tx.Commit() - } - }() - - headerCID, err := bcr.RetrieveHeaderCIDs(tx, blockNumber) - if err != nil { - log.Error("header cid retrieval error") - return HeaderModel{}, nil, err - } - if len(headerCID) < 1 { - return HeaderModel{}, nil, 
fmt.Errorf("header cid retrieval error, no header CIDs found at block %d", blockNumber) - } - txCIDs, err := bcr.RetrieveTxCIDsByHeaderID(tx, headerCID[0].ID) - if err != nil { - log.Error("tx cid retrieval error") - } - return headerCID[0], txCIDs, err -} - -// RetrieveHeaderCIDByHash returns the header for the given block hash -func (bcr *CIDRetriever) RetrieveHeaderCIDByHash(tx *sqlx.Tx, blockHash common.Hash) (HeaderModel, error) { - log.Debug("retrieving header cids for block hash ", blockHash.String()) - pgStr := `SELECT * FROM btc.header_cids - WHERE block_hash = $1` - var headerCID HeaderModel - return headerCID, tx.Get(&headerCID, pgStr, blockHash.String()) -} - -// RetrieveTxCIDsByHeaderID retrieves all tx CIDs for the given header id -func (bcr *CIDRetriever) RetrieveTxCIDsByHeaderID(tx *sqlx.Tx, headerID int64) ([]TxModel, error) { - log.Debug("retrieving tx cids for block id ", headerID) - pgStr := `SELECT * FROM btc.transaction_cids - WHERE header_id = $1` - var txCIDs []TxModel - return txCIDs, tx.Select(&txCIDs, pgStr, headerID) -} diff --git a/pkg/btc/cleaner.go b/pkg/btc/cleaner.go deleted file mode 100644 index 0b9d0167..00000000 --- a/pkg/btc/cleaner.go +++ /dev/null @@ -1,193 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package btc - -import ( - "fmt" - - "github.com/jmoiron/sqlx" - "github.com/sirupsen/logrus" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -// Cleaner satisfies the shared.Cleaner interface fo bitcoin -type Cleaner struct { - db *postgres.DB -} - -// NewCleaner returns a new Cleaner struct that satisfies the shared.Cleaner interface -func NewCleaner(db *postgres.DB) *Cleaner { - return &Cleaner{ - db: db, - } -} - -// ResetValidation resets the validation level to 0 to enable revalidation -func (c *Cleaner) ResetValidation(rngs [][2]uint64) error { - tx, err := c.db.Beginx() - if err != nil { - return err - } - for _, rng := range rngs { - logrus.Infof("btc db cleaner resetting validation level to 0 for block range %d to %d", rng[0], rng[1]) - pgStr := `UPDATE btc.header_cids - SET times_validated = 0 - WHERE block_number BETWEEN $1 AND $2` - if _, err := tx.Exec(pgStr, rng[0], rng[1]); err != nil { - shared.Rollback(tx) - return err - } - } - return tx.Commit() -} - -// Clean removes the specified data from the db within the provided block range -func (c *Cleaner) Clean(rngs [][2]uint64, t shared.DataType) error { - tx, err := c.db.Beginx() - if err != nil { - return err - } - for _, rng := range rngs { - logrus.Infof("btc db cleaner cleaning up block range %d to %d", rng[0], rng[1]) - if err := c.clean(tx, rng, t); err != nil { - shared.Rollback(tx) - return err - } - } - if err := tx.Commit(); err != nil { - return err - } - logrus.Infof("btc db cleaner vacuum analyzing cleaned tables to free up space from deleted rows") - return c.vacuumAnalyze(t) -} - -func (c *Cleaner) clean(tx *sqlx.Tx, rng [2]uint64, t shared.DataType) error { - switch t { - case shared.Full, shared.Headers: - return c.cleanFull(tx, rng) - case shared.Transactions: - if err := c.cleanTransactionIPLDs(tx, rng); err != nil { - return err - } - return c.cleanTransactionMetaData(tx, rng) - default: - return fmt.Errorf("btc cleaner unrecognized type: %s", t.String()) - } -} - -func (c *Cleaner) vacuumAnalyze(t shared.DataType) error { - switch t { - case shared.Full, shared.Headers: - if err := c.vacuumHeaders(); err != nil { - return err - } - if err := c.vacuumTxs(); err != nil { - return err - } - if err := c.vacuumTxInputs(); err != nil { - return err - } - if err := c.vacuumTxOutputs(); err != nil { - return err - } - case shared.Transactions: - if err := c.vacuumTxs(); err != nil { - return err - } - if err := c.vacuumTxInputs(); err != nil { - return err - } - if err := c.vacuumTxOutputs(); err != nil { - return err - } - default: - return fmt.Errorf("btc cleaner unrecognized type: %s", t.String()) - } - return c.vacuumIPLDs() -} - -func (c *Cleaner) vacuumHeaders() error { - _, err := c.db.Exec(`VACUUM ANALYZE btc.header_cids`) - return err -} - -func (c *Cleaner) vacuumTxs() error { - _, err := c.db.Exec(`VACUUM ANALYZE btc.transaction_cids`) - return err -} - -func (c *Cleaner) vacuumTxInputs() error { - _, err := c.db.Exec(`VACUUM ANALYZE btc.tx_inputs`) - return err -} - -func (c *Cleaner) vacuumTxOutputs() error { - _, err := c.db.Exec(`VACUUM ANALYZE btc.tx_outputs`) - return err -} - -func (c *Cleaner) vacuumIPLDs() error { - _, err := c.db.Exec(`VACUUM ANALYZE public.blocks`) - return err -} - -func (c *Cleaner) cleanFull(tx *sqlx.Tx, rng [2]uint64) error { - if err := c.cleanTransactionIPLDs(tx, rng); err != nil { - return err - } - if err := c.cleanHeaderIPLDs(tx, rng); err != nil { - return err - } - return 
c.cleanHeaderMetaData(tx, rng) -} - -func (c *Cleaner) cleanTransactionIPLDs(tx *sqlx.Tx, rng [2]uint64) error { - pgStr := `DELETE FROM public.blocks A - USING btc.transaction_cids B, btc.header_cids C - WHERE A.key = B.mh_key - AND B.header_id = C.id - AND C.block_number BETWEEN $1 AND $2` - _, err := tx.Exec(pgStr, rng[0], rng[1]) - return err -} - -func (c *Cleaner) cleanTransactionMetaData(tx *sqlx.Tx, rng [2]uint64) error { - pgStr := `DELETE FROM btc.transaction_cids A - USING btc.header_cids B - WHERE A.header_id = B.id - AND B.block_number BETWEEN $1 AND $2` - _, err := tx.Exec(pgStr, rng[0], rng[1]) - return err -} - -func (c *Cleaner) cleanHeaderIPLDs(tx *sqlx.Tx, rng [2]uint64) error { - pgStr := `DELETE FROM public.blocks A - USING btc.header_cids B - WHERE A.key = B.mh_key - AND B.block_number BETWEEN $1 AND $2` - _, err := tx.Exec(pgStr, rng[0], rng[1]) - return err -} - -func (c *Cleaner) cleanHeaderMetaData(tx *sqlx.Tx, rng [2]uint64) error { - pgStr := `DELETE FROM btc.header_cids - WHERE block_number BETWEEN $1 AND $2` - _, err := tx.Exec(pgStr, rng[0], rng[1]) - return err -} diff --git a/pkg/btc/cleaner_test.go b/pkg/btc/cleaner_test.go deleted file mode 100644 index c8e43320..00000000 --- a/pkg/btc/cleaner_test.go +++ /dev/null @@ -1,354 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package btc_test - -import ( - "math/big" - - "github.com/ethereum/go-ethereum/crypto" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/btc" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -var ( - // Block 0 - // header variables - blockHash1 = crypto.Keccak256Hash([]byte{00, 02}) - blocKNumber1 = big.NewInt(0) - headerCid1 = shared.TestCID([]byte("mockHeader1CID")) - headerMhKey1 = shared.MultihashKeyFromCID(headerCid1) - parentHash = crypto.Keccak256Hash([]byte{00, 01}) - headerModel1 = btc.HeaderModel{ - BlockHash: blockHash1.String(), - BlockNumber: blocKNumber1.String(), - ParentHash: parentHash.String(), - CID: headerCid1.String(), - MhKey: headerMhKey1, - } - - // tx variables - tx1CID = shared.TestCID([]byte("mockTx1CID")) - tx1MhKey = shared.MultihashKeyFromCID(tx1CID) - tx2CID = shared.TestCID([]byte("mockTx2CID")) - tx2MhKey = shared.MultihashKeyFromCID(tx2CID) - tx1Hash = crypto.Keccak256Hash([]byte{01, 01}) - tx2Hash = crypto.Keccak256Hash([]byte{01, 02}) - opHash = crypto.Keccak256Hash([]byte{02, 01}) - txModels1 = []btc.TxModelWithInsAndOuts{ - { - Index: 0, - CID: tx1CID.String(), - MhKey: tx1MhKey, - TxHash: tx1Hash.String(), - SegWit: true, - TxInputs: []btc.TxInput{ - { - Index: 0, - TxWitness: []string{"mockWitness"}, - SignatureScript: []byte{01}, - PreviousOutPointIndex: 0, - PreviousOutPointHash: opHash.String(), - }, - }, - TxOutputs: []btc.TxOutput{ - { - Index: 0, - Value: 50000000, - PkScript: []byte{02}, - ScriptClass: 0, - RequiredSigs: 1, - }, - }, - }, - { - Index: 1, - CID: tx2CID.String(), - MhKey: tx2MhKey, - TxHash: tx2Hash.String(), - SegWit: true, - }, - } - mockCIDPayload1 = &btc.CIDPayload{ - HeaderCID: headerModel1, - TransactionCIDs: txModels1, - } - - // Block 1 - // header variables - blockHash2 = crypto.Keccak256Hash([]byte{00, 03}) - blocKNumber2 = big.NewInt(1) - headerCid2 = shared.TestCID([]byte("mockHeaderCID2")) - headerMhKey2 = shared.MultihashKeyFromCID(headerCid2) - headerModel2 = btc.HeaderModel{ - BlockNumber: blocKNumber2.String(), - BlockHash: blockHash2.String(), - ParentHash: blockHash1.String(), - CID: headerCid2.String(), - MhKey: headerMhKey2, - } - - // tx variables - tx3CID = shared.TestCID([]byte("mockTx3CID")) - tx3MhKey = shared.MultihashKeyFromCID(tx3CID) - tx3Hash = crypto.Keccak256Hash([]byte{01, 03}) - txModels2 = []btc.TxModelWithInsAndOuts{ - { - Index: 0, - CID: tx3CID.String(), - MhKey: tx3MhKey, - TxHash: tx3Hash.String(), - SegWit: true, - }, - } - mockCIDPayload2 = &btc.CIDPayload{ - HeaderCID: headerModel2, - TransactionCIDs: txModels2, - } - rngs = [][2]uint64{{0, 1}} - mhKeys = []string{ - headerMhKey1, - headerMhKey2, - tx1MhKey, - tx2MhKey, - tx3MhKey, - } - mockData = []byte{'\x01'} -) - -var _ = Describe("Cleaner", func() { - var ( - db *postgres.DB - repo *btc.CIDIndexer - cleaner *btc.Cleaner - ) - BeforeEach(func() { - var err error - db, err = shared.SetupDB() - Expect(err).ToNot(HaveOccurred()) - repo = btc.NewCIDIndexer(db) - cleaner = btc.NewCleaner(db) - }) - - Describe("Clean", func() { - BeforeEach(func() { - for _, key := range mhKeys { - _, err := db.Exec(`INSERT INTO public.blocks (key, data) VALUES ($1, $2)`, key, mockData) - Expect(err).ToNot(HaveOccurred()) - } - err := repo.Index(mockCIDPayload1) - Expect(err).ToNot(HaveOccurred()) - err = repo.Index(mockCIDPayload2) - Expect(err).ToNot(HaveOccurred()) - - tx, err := db.Beginx() - Expect(err).ToNot(HaveOccurred()) - var startingIPFSBlocksCount int - pgStr := `SELECT COUNT(*) FROM public.blocks` - err = 
tx.Get(&startingIPFSBlocksCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var startingTxCount int - pgStr = `SELECT COUNT(*) FROM btc.transaction_cids` - err = tx.Get(&startingTxCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var startingHeaderCount int - pgStr = `SELECT COUNT(*) FROM btc.header_cids` - err = tx.Get(&startingHeaderCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - - err = tx.Commit() - Expect(err).ToNot(HaveOccurred()) - - Expect(startingIPFSBlocksCount).To(Equal(5)) - Expect(startingTxCount).To(Equal(3)) - Expect(startingHeaderCount).To(Equal(2)) - }) - AfterEach(func() { - btc.TearDownDB(db) - }) - It("Cleans everything", func() { - err := cleaner.Clean(rngs, shared.Full) - Expect(err).ToNot(HaveOccurred()) - - tx, err := db.Beginx() - Expect(err).ToNot(HaveOccurred()) - var txCount int - pgStr := `SELECT COUNT(*) FROM btc.transaction_cids` - err = tx.Get(&txCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var txInCount int - pgStr = `SELECT COUNT(*) FROM btc.tx_inputs` - err = tx.Get(&txInCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var txOutCount int - pgStr = `SELECT COUNT(*) FROM btc.tx_outputs` - err = tx.Get(&txOutCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var headerCount int - pgStr = `SELECT COUNT(*) FROM btc.header_cids` - err = tx.Get(&headerCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var blocksCount int - pgStr = `SELECT COUNT(*) FROM public.blocks` - err = tx.Get(&blocksCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - - err = tx.Commit() - Expect(err).ToNot(HaveOccurred()) - - Expect(blocksCount).To(Equal(0)) - Expect(txCount).To(Equal(0)) - Expect(txInCount).To(Equal(0)) - Expect(txOutCount).To(Equal(0)) - Expect(headerCount).To(Equal(0)) - }) - It("Cleans headers and all linked data", func() { - err := cleaner.Clean(rngs, shared.Headers) - Expect(err).ToNot(HaveOccurred()) - - tx, err := db.Beginx() - Expect(err).ToNot(HaveOccurred()) - var txCount int - pgStr := `SELECT COUNT(*) FROM btc.transaction_cids` - err = tx.Get(&txCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var txInCount int - pgStr = `SELECT COUNT(*) FROM btc.tx_inputs` - err = tx.Get(&txInCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var txOutCount int - pgStr = `SELECT COUNT(*) FROM btc.tx_outputs` - err = tx.Get(&txOutCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var headerCount int - pgStr = `SELECT COUNT(*) FROM btc.header_cids` - err = tx.Get(&headerCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var blocksCount int - pgStr = `SELECT COUNT(*) FROM public.blocks` - err = tx.Get(&blocksCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - - err = tx.Commit() - Expect(err).ToNot(HaveOccurred()) - - Expect(blocksCount).To(Equal(0)) - Expect(txCount).To(Equal(0)) - Expect(txInCount).To(Equal(0)) - Expect(txOutCount).To(Equal(0)) - Expect(headerCount).To(Equal(0)) - }) - It("Cleans transactions", func() { - err := cleaner.Clean(rngs, shared.Transactions) - Expect(err).ToNot(HaveOccurred()) - - tx, err := db.Beginx() - Expect(err).ToNot(HaveOccurred()) - var txCount int - pgStr := `SELECT COUNT(*) FROM btc.transaction_cids` - err = tx.Get(&txCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var txInCount int - pgStr = `SELECT COUNT(*) FROM btc.tx_inputs` - err = tx.Get(&txInCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var txOutCount int - pgStr = `SELECT COUNT(*) FROM btc.tx_outputs` - err = tx.Get(&txOutCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var headerCount int - pgStr = `SELECT COUNT(*) FROM btc.header_cids` - err 
= tx.Get(&headerCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var blocksCount int - pgStr = `SELECT COUNT(*) FROM public.blocks` - err = tx.Get(&blocksCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - - err = tx.Commit() - Expect(err).ToNot(HaveOccurred()) - - Expect(blocksCount).To(Equal(2)) - Expect(txCount).To(Equal(0)) - Expect(txInCount).To(Equal(0)) - Expect(txOutCount).To(Equal(0)) - Expect(headerCount).To(Equal(2)) - }) - }) - - Describe("ResetValidation", func() { - BeforeEach(func() { - for _, key := range mhKeys { - _, err := db.Exec(`INSERT INTO public.blocks (key, data) VALUES ($1, $2)`, key, mockData) - Expect(err).ToNot(HaveOccurred()) - } - - err := repo.Index(mockCIDPayload1) - Expect(err).ToNot(HaveOccurred()) - err = repo.Index(mockCIDPayload2) - Expect(err).ToNot(HaveOccurred()) - - var validationTimes []int - pgStr := `SELECT times_validated FROM btc.header_cids` - err = db.Select(&validationTimes, pgStr) - Expect(err).ToNot(HaveOccurred()) - Expect(len(validationTimes)).To(Equal(2)) - Expect(validationTimes[0]).To(Equal(1)) - Expect(validationTimes[1]).To(Equal(1)) - - err = repo.Index(mockCIDPayload1) - Expect(err).ToNot(HaveOccurred()) - - validationTimes = []int{} - pgStr = `SELECT times_validated FROM btc.header_cids ORDER BY block_number` - err = db.Select(&validationTimes, pgStr) - Expect(err).ToNot(HaveOccurred()) - Expect(len(validationTimes)).To(Equal(2)) - Expect(validationTimes[0]).To(Equal(2)) - Expect(validationTimes[1]).To(Equal(1)) - }) - AfterEach(func() { - btc.TearDownDB(db) - }) - It("Resets the validation level", func() { - err := cleaner.ResetValidation(rngs) - Expect(err).ToNot(HaveOccurred()) - - var validationTimes []int - pgStr := `SELECT times_validated FROM btc.header_cids` - err = db.Select(&validationTimes, pgStr) - Expect(err).ToNot(HaveOccurred()) - Expect(len(validationTimes)).To(Equal(2)) - Expect(validationTimes[0]).To(Equal(0)) - Expect(validationTimes[1]).To(Equal(0)) - - err = repo.Index(mockCIDPayload2) - Expect(err).ToNot(HaveOccurred()) - - validationTimes = []int{} - pgStr = `SELECT times_validated FROM btc.header_cids ORDER BY block_number` - err = db.Select(&validationTimes, pgStr) - Expect(err).ToNot(HaveOccurred()) - Expect(len(validationTimes)).To(Equal(2)) - Expect(validationTimes[0]).To(Equal(0)) - Expect(validationTimes[1]).To(Equal(1)) - }) - }) -}) diff --git a/pkg/btc/converter.go b/pkg/btc/converter.go deleted file mode 100644 index 7169f36c..00000000 --- a/pkg/btc/converter.go +++ /dev/null @@ -1,102 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package btc - -import ( - "encoding/hex" - "fmt" - - "github.com/btcsuite/btcd/chaincfg" - "github.com/btcsuite/btcd/txscript" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -// PayloadConverter satisfies the PayloadConverter interface for bitcoin -type PayloadConverter struct { - chainConfig *chaincfg.Params -} - -// NewPayloadConverter creates a pointer to a new PayloadConverter which satisfies the PayloadConverter interface -func NewPayloadConverter(chainConfig *chaincfg.Params) *PayloadConverter { - return &PayloadConverter{ - chainConfig: chainConfig, - } -} - -// Convert method is used to convert a bitcoin BlockPayload to an IPLDPayload -// Satisfies the shared.PayloadConverter interface -func (pc *PayloadConverter) Convert(payload shared.RawChainData) (shared.ConvertedData, error) { - btcBlockPayload, ok := payload.(BlockPayload) - if !ok { - return nil, fmt.Errorf("btc converter: expected payload type %T got %T", BlockPayload{}, payload) - } - txMeta := make([]TxModelWithInsAndOuts, len(btcBlockPayload.Txs)) - for i, tx := range btcBlockPayload.Txs { - txModel := TxModelWithInsAndOuts{ - TxHash: tx.Hash().String(), - Index: int64(i), - SegWit: tx.HasWitness(), - TxOutputs: make([]TxOutput, len(tx.MsgTx().TxOut)), - TxInputs: make([]TxInput, len(tx.MsgTx().TxIn)), - } - if tx.HasWitness() { - txModel.WitnessHash = tx.WitnessHash().String() - } - for i, in := range tx.MsgTx().TxIn { - txModel.TxInputs[i] = TxInput{ - Index: int64(i), - SignatureScript: in.SignatureScript, - PreviousOutPointHash: in.PreviousOutPoint.Hash.String(), - PreviousOutPointIndex: in.PreviousOutPoint.Index, - TxWitness: convertBytesToHexArray(in.Witness), - } - } - for i, out := range tx.MsgTx().TxOut { - scriptClass, addresses, numberOfSigs, err := txscript.ExtractPkScriptAddrs(out.PkScript, pc.chainConfig) - // if we receive an error but the txscript type isn't NonStandardTy then something went wrong - if err != nil && scriptClass != txscript.NonStandardTy { - return nil, err - } - stringAddrs := make([]string, len(addresses)) - for i, addr := range addresses { - stringAddrs[i] = addr.EncodeAddress() - } - txModel.TxOutputs[i] = TxOutput{ - Index: int64(i), - Value: out.Value, - PkScript: out.PkScript, - RequiredSigs: int64(numberOfSigs), - ScriptClass: uint8(scriptClass), - Addresses: stringAddrs, - } - } - txMeta[i] = txModel - } - return ConvertedPayload{ - BlockPayload: btcBlockPayload, - TxMetaData: txMeta, - }, nil -} - -func convertBytesToHexArray(bytea [][]byte) []string { - var strs []string - for _, b := range bytea { - strs = append(strs, hex.EncodeToString(b)) - } - return strs -} diff --git a/pkg/btc/converter_test.go b/pkg/btc/converter_test.go deleted file mode 100644 index bffc08bf..00000000 --- a/pkg/btc/converter_test.go +++ /dev/null @@ -1,43 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
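The Convert method removed above leans on txscript.ExtractPkScriptAddrs to classify each output script and collect its addresses, treating an error as fatal only when the script is not simply non-standard. A minimal standalone sketch of that call, using one of the P2PKH scripts from the mock data and mainnet parameters (the classifyOutput wrapper is illustrative, not part of the removed code):

package main

import (
	"fmt"

	"github.com/btcsuite/btcd/chaincfg"
	"github.com/btcsuite/btcd/txscript"
)

// classifyOutput wraps the same call Convert makes for every TxOut and
// returns the script class name plus the encoded addresses.
func classifyOutput(pkScript []byte) (string, []string, error) {
	class, addrs, _, err := txscript.ExtractPkScriptAddrs(pkScript, &chaincfg.MainNetParams)
	// as in Convert, a non-standard script is tolerated rather than treated as an error
	if err != nil && class != txscript.NonStandardTy {
		return "", nil, err
	}
	strs := make([]string, len(addrs))
	for i, addr := range addrs {
		strs[i] = addr.EncodeAddress()
	}
	return class.String(), strs, nil
}

func main() {
	// a standard pay-to-pubkey-hash script (taken from the mock test data)
	pkScript := []byte{
		0x76, 0xa9, 0x14, // OP_DUP OP_HASH160 OP_DATA_20
		0xc3, 0x98, 0xef, 0xa9, 0xc3, 0x92, 0xba, 0x60,
		0x13, 0xc5, 0xe0, 0x4e, 0xe7, 0x29, 0x75, 0x5e,
		0xf7, 0xf5, 0x8b, 0x32,
		0x88, 0xac, // OP_EQUALVERIFY OP_CHECKSIG
	}
	class, addrs, err := classifyOutput(pkScript)
	fmt.Println(class, addrs, err) // expected: pubkeyhash, one mainnet address, <nil>
}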
- -package btc_test - -import ( - "github.com/btcsuite/btcd/chaincfg" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/btc" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/btc/mocks" -) - -var _ = Describe("Converter", func() { - Describe("Convert", func() { - It("Converts mock BlockPayloads into the expected IPLDPayloads", func() { - converter := btc.NewPayloadConverter(&chaincfg.MainNetParams) - payload, err := converter.Convert(mocks.MockBlockPayload) - Expect(err).ToNot(HaveOccurred()) - convertedPayload, ok := payload.(btc.ConvertedPayload) - Expect(ok).To(BeTrue()) - Expect(convertedPayload).To(Equal(mocks.MockConvertedPayload)) - Expect(convertedPayload.BlockHeight).To(Equal(mocks.MockBlockHeight)) - Expect(convertedPayload.Header).To(Equal(&mocks.MockBlock.Header)) - Expect(convertedPayload.Txs).To(Equal(mocks.MockTransactions)) - Expect(convertedPayload.TxMetaData).To(Equal(mocks.MockTxsMetaData)) - }) - }) -}) diff --git a/pkg/btc/filterer.go b/pkg/btc/filterer.go deleted file mode 100644 index 3a6598cf..00000000 --- a/pkg/btc/filterer.go +++ /dev/null @@ -1,159 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
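The ResponseFilterer removed below gates every payload on a simple height window: a block passes when start <= height and either end <= 0 (meaning open-ended) or end >= height. A tiny standalone reproduction of that check with a few worked cases:

package main

import "fmt"

// checkRange reproduces the height test used by the removed btc filterer;
// an end of zero (or negative) means "no upper bound".
func checkRange(start, end, actual int64) bool {
	return (end <= 0 || end >= actual) && start <= actual
}

func main() {
	fmt.Println(checkRange(0, 0, 1337))       // true: open-ended range covers everything
	fmt.Println(checkRange(1000, 2000, 1337)) // true: 1337 lies inside [1000, 2000]
	fmt.Println(checkRange(1400, 0, 1337))    // false: block is below the start height
}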
- -package btc - -import ( - "bytes" - "fmt" - "math/big" - - "github.com/multiformats/go-multihash" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs/ipld" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -// ResponseFilterer satisfies the ResponseFilterer interface for bitcoin -type ResponseFilterer struct{} - -// NewResponseFilterer creates a new Filterer satisfying the ResponseFilterer interface -func NewResponseFilterer() *ResponseFilterer { - return &ResponseFilterer{} -} - -// Filter is used to filter through btc data to extract and package requested data into a Payload -func (s *ResponseFilterer) Filter(filter shared.SubscriptionSettings, payload shared.ConvertedData) (shared.IPLDs, error) { - btcFilters, ok := filter.(*SubscriptionSettings) - if !ok { - return IPLDs{}, fmt.Errorf("btc filterer expected filter type %T got %T", &SubscriptionSettings{}, filter) - } - btcPayload, ok := payload.(ConvertedPayload) - if !ok { - return IPLDs{}, fmt.Errorf("btc filterer expected payload type %T got %T", ConvertedPayload{}, payload) - } - height := int64(btcPayload.BlockPayload.BlockHeight) - if checkRange(btcFilters.Start.Int64(), btcFilters.End.Int64(), height) { - response := new(IPLDs) - if err := s.filterHeaders(btcFilters.HeaderFilter, response, btcPayload); err != nil { - return IPLDs{}, err - } - if err := s.filterTransactions(btcFilters.TxFilter, response, btcPayload); err != nil { - return IPLDs{}, err - } - response.BlockNumber = big.NewInt(height) - return *response, nil - } - return IPLDs{}, nil -} - -func (s *ResponseFilterer) filterHeaders(headerFilter HeaderFilter, response *IPLDs, payload ConvertedPayload) error { - if !headerFilter.Off { - headerBuffer := new(bytes.Buffer) - if err := payload.Header.Serialize(headerBuffer); err != nil { - return err - } - data := headerBuffer.Bytes() - cid, err := ipld.RawdataToCid(ipld.MBitcoinHeader, data, multihash.DBL_SHA2_256) - if err != nil { - return err - } - response.Header = ipfs.BlockModel{ - Data: data, - CID: cid.String(), - } - } - return nil -} - -func checkRange(start, end, actual int64) bool { - if (end <= 0 || end >= actual) && start <= actual { - return true - } - return false -} - -func (s *ResponseFilterer) filterTransactions(trxFilter TxFilter, response *IPLDs, payload ConvertedPayload) error { - if !trxFilter.Off { - response.Transactions = make([]ipfs.BlockModel, 0, len(payload.TxMetaData)) - for i, txMeta := range payload.TxMetaData { - if checkTransaction(txMeta, trxFilter) { - trxBuffer := new(bytes.Buffer) - if err := payload.Txs[i].MsgTx().Serialize(trxBuffer); err != nil { - return err - } - data := trxBuffer.Bytes() - cid, err := ipld.RawdataToCid(ipld.MBitcoinTx, data, multihash.DBL_SHA2_256) - if err != nil { - return err - } - response.Transactions = append(response.Transactions, ipfs.BlockModel{ - Data: data, - CID: cid.String(), - }) - } - } - } - return nil -} - -// checkTransaction returns true if the provided transaction has a hit on the filter -func checkTransaction(txMeta TxModelWithInsAndOuts, txFilter TxFilter) bool { - passesSegwitFilter := false - if !txFilter.Segwit || (txFilter.Segwit && txMeta.SegWit) { - passesSegwitFilter = true - } - passesMultiSigFilter := !txFilter.MultiSig - if txFilter.MultiSig { - for _, out := range txMeta.TxOutputs { - if out.RequiredSigs > 1 { - passesMultiSigFilter = true - } - } - } - passesWitnessFilter := len(txFilter.WitnessHashes) == 0 - for _, wantedWitnessHash := range 
txFilter.WitnessHashes { - if wantedWitnessHash == txMeta.WitnessHash { - passesWitnessFilter = true - } - } - passesAddressFilter := len(txFilter.Addresses) == 0 - for _, wantedAddress := range txFilter.Addresses { - for _, out := range txMeta.TxOutputs { - for _, actualAddress := range out.Addresses { - if wantedAddress == actualAddress { - passesAddressFilter = true - } - } - } - } - passesIndexFilter := len(txFilter.Indexes) == 0 - for _, wantedIndex := range txFilter.Indexes { - if wantedIndex == txMeta.Index { - passesIndexFilter = true - } - } - passesPkScriptClassFilter := len(txFilter.PkScriptClasses) == 0 - for _, wantedPkScriptClass := range txFilter.PkScriptClasses { - for _, out := range txMeta.TxOutputs { - if out.ScriptClass == wantedPkScriptClass { - passesPkScriptClassFilter = true - } - } - } - return passesSegwitFilter && passesMultiSigFilter && passesWitnessFilter && passesAddressFilter && passesIndexFilter && passesPkScriptClassFilter -} diff --git a/pkg/btc/http_streamer.go b/pkg/btc/http_streamer.go deleted file mode 100644 index be00a508..00000000 --- a/pkg/btc/http_streamer.go +++ /dev/null @@ -1,104 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
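checkTransaction above ANDs six independent predicates, and each one follows the same shape: an empty wanted-list (or unset flag) is a wildcard that passes everything, otherwise at least one element must match. An empty TxFilter therefore matches every transaction. A small sketch of that shared shape for the index predicate (the containsIndex helper is illustrative, not part of the removed code):

package main

import "fmt"

// containsIndex captures the predicate shape repeated throughout the removed
// checkTransaction: an empty wanted list acts as a wildcard, otherwise at
// least one element must equal the transaction's value.
func containsIndex(wanted []int64, actual int64) bool {
	if len(wanted) == 0 {
		return true // no filter set: everything passes
	}
	for _, w := range wanted {
		if w == actual {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(containsIndex(nil, 2))           // true: empty filter is a wildcard
	fmt.Println(containsIndex([]int64{0, 2}, 1)) // false: index 1 was not requested
	fmt.Println(containsIndex([]int64{0, 2}, 2)) // true: index 2 was requested
}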
- -package btc - -import ( - "bytes" - "time" - - "github.com/btcsuite/btcd/rpcclient" - "github.com/sirupsen/logrus" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -// HTTPPayloadStreamer satisfies the PayloadStreamer interface for bitcoin over http endpoints (since bitcoin core doesn't support websockets) -type HTTPPayloadStreamer struct { - Config *rpcclient.ConnConfig - lastHash []byte -} - -// NewHTTPPayloadStreamer creates a pointer to a new PayloadStreamer which satisfies the PayloadStreamer interface for bitcoin -func NewHTTPPayloadStreamer(clientConfig *rpcclient.ConnConfig) *HTTPPayloadStreamer { - return &HTTPPayloadStreamer{ - Config: clientConfig, - } -} - -// Stream is the main loop for subscribing to data from the btc block notifications -// Satisfies the shared.PayloadStreamer interface -func (ps *HTTPPayloadStreamer) Stream(payloadChan chan shared.RawChainData) (shared.ClientSubscription, error) { - logrus.Debug("streaming block payloads from btc") - client, err := rpcclient.New(ps.Config, nil) - if err != nil { - return nil, err - } - ticker := time.NewTicker(time.Second * 5) - errChan := make(chan error) - go func() { - for { - // start at - select { - case <-ticker.C: - height, err := client.GetBlockCount() - if err != nil { - errChan <- err - continue - } - blockHash, err := client.GetBlockHash(height) - if err != nil { - errChan <- err - continue - } - blockHashBytes := blockHash.CloneBytes() - if bytes.Equal(blockHashBytes, ps.lastHash) { - continue - } - block, err := client.GetBlock(blockHash) - if err != nil { - errChan <- err - continue - } - ps.lastHash = blockHashBytes - payloadChan <- BlockPayload{ - Header: &block.Header, - BlockHeight: height, - Txs: msgTxsToUtilTxs(block.Transactions), - } - default: - } - } - }() - return &HTTPClientSubscription{client: client, errChan: errChan}, nil -} - -// HTTPClientSubscription is a wrapper around the underlying bitcoind rpc client -// to fit the shared.ClientSubscription interface -type HTTPClientSubscription struct { - client *rpcclient.Client - errChan chan error -} - -// Unsubscribe satisfies the rpc.Subscription interface -func (bcs *HTTPClientSubscription) Unsubscribe() { - bcs.client.Shutdown() -} - -// Err() satisfies the rpc.Subscription interface -func (bcs *HTTPClientSubscription) Err() <-chan error { - return bcs.errChan -} diff --git a/pkg/btc/indexer.go b/pkg/btc/indexer.go deleted file mode 100644 index bd9af179..00000000 --- a/pkg/btc/indexer.go +++ /dev/null @@ -1,132 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
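HTTPPayloadStreamer above polls bitcoind because core exposes no websocket notifications: on every tick it compares the current best block hash against the last one seen and emits a payload only when the tip has changed. A stripped-down sketch of that poll loop using the same btcd rpcclient calls (connection details are placeholders, and error handling is reduced to logging):

package example

import (
	"bytes"
	"log"
	"time"

	"github.com/btcsuite/btcd/rpcclient"
	"github.com/btcsuite/btcd/wire"
)

// pollBlocks is an illustrative reduction of the removed streamer loop:
// poll the node on a ticker and emit a block only when the tip changes.
func pollBlocks(cfg *rpcclient.ConnConfig, out chan<- *wire.MsgBlock) error {
	client, err := rpcclient.New(cfg, nil) // nil handlers: plain HTTP POST mode
	if err != nil {
		return err
	}
	var lastHash []byte
	ticker := time.NewTicker(5 * time.Second)
	for range ticker.C {
		height, err := client.GetBlockCount()
		if err != nil {
			log.Println("GetBlockCount:", err)
			continue
		}
		hash, err := client.GetBlockHash(height)
		if err != nil {
			log.Println("GetBlockHash:", err)
			continue
		}
		if bytes.Equal(hash.CloneBytes(), lastHash) {
			continue // tip unchanged since the last tick
		}
		block, err := client.GetBlock(hash)
		if err != nil {
			log.Println("GetBlock:", err)
			continue
		}
		lastHash = hash.CloneBytes()
		out <- block
	}
	return nil
}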
- -package btc - -import ( - "fmt" - - "github.com/sirupsen/logrus" - - "github.com/jmoiron/sqlx" - "github.com/lib/pq" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -type CIDIndexer struct { - db *postgres.DB -} - -func NewCIDIndexer(db *postgres.DB) *CIDIndexer { - return &CIDIndexer{ - db: db, - } -} - -func (in *CIDIndexer) Index(cids shared.CIDsForIndexing) error { - cidWrapper, ok := cids.(*CIDPayload) - if !ok { - return fmt.Errorf("btc indexer expected cids type %T got %T", &CIDPayload{}, cids) - } - - // Begin new db tx - tx, err := in.db.Beginx() - if err != nil { - return err - } - defer func() { - if p := recover(); p != nil { - shared.Rollback(tx) - panic(p) - } else if err != nil { - shared.Rollback(tx) - } else { - err = tx.Commit() - } - }() - - headerID, err := in.indexHeaderCID(tx, cidWrapper.HeaderCID) - if err != nil { - logrus.Error("btc indexer error when indexing header") - return err - } - err = in.indexTransactionCIDs(tx, cidWrapper.TransactionCIDs, headerID) - if err != nil { - logrus.Error("btc indexer error when indexing transactions") - } - return err -} - -func (in *CIDIndexer) indexHeaderCID(tx *sqlx.Tx, header HeaderModel) (int64, error) { - var headerID int64 - err := tx.QueryRowx(`INSERT INTO btc.header_cids (block_number, block_hash, parent_hash, cid, timestamp, bits, node_id, mh_key, times_validated) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) - ON CONFLICT (block_number, block_hash) DO UPDATE SET (parent_hash, cid, timestamp, bits, node_id, mh_key, times_validated) = ($3, $4, $5, $6, $7, $8, btc.header_cids.times_validated + 1) - RETURNING id`, - header.BlockNumber, header.BlockHash, header.ParentHash, header.CID, header.Timestamp, header.Bits, in.db.NodeID, header.MhKey, 1).Scan(&headerID) - return headerID, err -} - -func (in *CIDIndexer) indexTransactionCIDs(tx *sqlx.Tx, transactions []TxModelWithInsAndOuts, headerID int64) error { - for _, transaction := range transactions { - txID, err := in.indexTransactionCID(tx, transaction, headerID) - if err != nil { - logrus.Error("btc indexer error when indexing header") - return err - } - for _, input := range transaction.TxInputs { - if err := in.indexTxInput(tx, input, txID); err != nil { - logrus.Error("btc indexer error when indexing tx inputs") - return err - } - } - for _, output := range transaction.TxOutputs { - if err := in.indexTxOutput(tx, output, txID); err != nil { - logrus.Error("btc indexer error when indexing tx outputs") - return err - } - } - } - return nil -} - -func (in *CIDIndexer) indexTransactionCID(tx *sqlx.Tx, transaction TxModelWithInsAndOuts, headerID int64) (int64, error) { - var txID int64 - err := tx.QueryRowx(`INSERT INTO btc.transaction_cids (header_id, tx_hash, index, cid, segwit, witness_hash, mh_key) - VALUES ($1, $2, $3, $4, $5, $6, $7) - ON CONFLICT (tx_hash) DO UPDATE SET (header_id, index, cid, segwit, witness_hash, mh_key) = ($1, $3, $4, $5, $6, $7) - RETURNING id`, - headerID, transaction.TxHash, transaction.Index, transaction.CID, transaction.SegWit, transaction.WitnessHash, transaction.MhKey).Scan(&txID) - return txID, err -} - -func (in *CIDIndexer) indexTxInput(tx *sqlx.Tx, txInput TxInput, txID int64) error { - _, err := tx.Exec(`INSERT INTO btc.tx_inputs (tx_id, index, witness, sig_script, outpoint_tx_hash, outpoint_index) - VALUES ($1, $2, $3, $4, $5, $6) - ON CONFLICT (tx_id, index) DO UPDATE SET (witness, sig_script, outpoint_tx_hash, outpoint_index) = ($3, $4, $5, $6)`, - 
txID, txInput.Index, pq.Array(txInput.TxWitness), txInput.SignatureScript, txInput.PreviousOutPointHash, txInput.PreviousOutPointIndex) - return err -} - -func (in *CIDIndexer) indexTxOutput(tx *sqlx.Tx, txOuput TxOutput, txID int64) error { - _, err := tx.Exec(`INSERT INTO btc.tx_outputs (tx_id, index, value, pk_script, script_class, addresses, required_sigs) - VALUES ($1, $2, $3, $4, $5, $6, $7) - ON CONFLICT (tx_id, index) DO UPDATE SET (value, pk_script, script_class, addresses, required_sigs) = ($3, $4, $5, $6, $7)`, - txID, txOuput.Index, txOuput.Value, txOuput.PkScript, txOuput.ScriptClass, txOuput.Addresses, txOuput.RequiredSigs) - return err -} diff --git a/pkg/btc/indexer_test.go b/pkg/btc/indexer_test.go deleted file mode 100644 index 8b4dea11..00000000 --- a/pkg/btc/indexer_test.go +++ /dev/null @@ -1,94 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package btc_test - -import ( - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/btc" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/btc/mocks" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -var _ = Describe("Indexer", func() { - var ( - db *postgres.DB - err error - repo *btc.CIDIndexer - mockData = []byte{1, 2, 3} - ) - BeforeEach(func() { - db, err = shared.SetupDB() - Expect(err).ToNot(HaveOccurred()) - repo = btc.NewCIDIndexer(db) - // need entries in the public.blocks with the mhkeys or the FK constraint will fail - shared.PublishMockIPLD(db, mocks.MockHeaderMhKey, mockData) - shared.PublishMockIPLD(db, mocks.MockTrxMhKey1, mockData) - shared.PublishMockIPLD(db, mocks.MockTrxMhKey2, mockData) - shared.PublishMockIPLD(db, mocks.MockTrxMhKey3, mockData) - }) - AfterEach(func() { - btc.TearDownDB(db) - }) - - Describe("Index", func() { - It("Indexes CIDs and related metadata into vulcanizedb", func() { - - err = repo.Index(&mocks.MockCIDPayload) - Expect(err).ToNot(HaveOccurred()) - pgStr := `SELECT * FROM btc.header_cids - WHERE block_number = $1` - // check header was properly indexed - header := new(btc.HeaderModel) - err = db.Get(header, pgStr, mocks.MockHeaderMetaData.BlockNumber) - Expect(err).ToNot(HaveOccurred()) - Expect(header.CID).To(Equal(mocks.MockHeaderMetaData.CID)) - Expect(header.BlockNumber).To(Equal(mocks.MockHeaderMetaData.BlockNumber)) - Expect(header.Bits).To(Equal(mocks.MockHeaderMetaData.Bits)) - Expect(header.Timestamp).To(Equal(mocks.MockHeaderMetaData.Timestamp)) - Expect(header.BlockHash).To(Equal(mocks.MockHeaderMetaData.BlockHash)) - Expect(header.ParentHash).To(Equal(mocks.MockHeaderMetaData.ParentHash)) - // check trxs were properly indexed - trxs := make([]btc.TxModel, 0) - pgStr = `SELECT transaction_cids.id, transaction_cids.header_id, transaction_cids.index, - transaction_cids.tx_hash, transaction_cids.cid, 
transaction_cids.segwit, transaction_cids.witness_hash - FROM btc.transaction_cids INNER JOIN btc.header_cids ON (transaction_cids.header_id = header_cids.id) - WHERE header_cids.block_number = $1` - err = db.Select(&trxs, pgStr, mocks.MockHeaderMetaData.BlockNumber) - Expect(err).ToNot(HaveOccurred()) - Expect(len(trxs)).To(Equal(3)) - for _, tx := range trxs { - Expect(tx.SegWit).To(Equal(false)) - Expect(tx.HeaderID).To(Equal(header.ID)) - Expect(tx.WitnessHash).To(Equal("")) - switch tx.Index { - case 0: - Expect(tx.CID).To(Equal(mocks.MockTrxCID1.String())) - Expect(tx.TxHash).To(Equal(mocks.MockBlock.Transactions[0].TxHash().String())) - case 1: - Expect(tx.CID).To(Equal(mocks.MockTrxCID2.String())) - Expect(tx.TxHash).To(Equal(mocks.MockBlock.Transactions[1].TxHash().String())) - case 2: - Expect(tx.CID).To(Equal(mocks.MockTrxCID3.String())) - Expect(tx.TxHash).To(Equal(mocks.MockBlock.Transactions[2].TxHash().String())) - } - } - }) - }) -}) diff --git a/pkg/btc/ipld_fetcher.go b/pkg/btc/ipld_fetcher.go deleted file mode 100644 index 0e8f1213..00000000 --- a/pkg/btc/ipld_fetcher.go +++ /dev/null @@ -1,107 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
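The CIDIndexer above and the IPLDFetcher removed next share the same transaction idiom: begin, defer a closure that rolls back on panic or error and commits otherwise, and let the surrounding code assign err before the defer runs. Extracted on its own purely as a sketch (withTx is hypothetical; here a named return is used so a commit failure also reaches the caller):

package example

import "github.com/jmoiron/sqlx"

// withTx is an illustrative extraction of the begin/rollback/commit idiom
// repeated in the removed indexer, fetcher, and publisher code.
func withTx(db *sqlx.DB, fn func(*sqlx.Tx) error) (err error) {
	tx, err := db.Beginx()
	if err != nil {
		return err
	}
	defer func() {
		if p := recover(); p != nil {
			_ = tx.Rollback()
			panic(p) // re-throw after cleaning up
		} else if err != nil {
			_ = tx.Rollback()
		} else {
			err = tx.Commit()
		}
	}()
	err = fn(tx)
	return err
}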
- -package btc - -import ( - "fmt" - - "github.com/jmoiron/sqlx" - log "github.com/sirupsen/logrus" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -// IPLDFetcher satisfies the IPLDFetcher interface for ethereum -// it interfaces directly with PG-IPFS instead of going through a node-interface or remote node -type IPLDFetcher struct { - db *postgres.DB -} - -// NewIPLDFetcher creates a pointer to a new IPLDFetcher -func NewIPLDFetcher(db *postgres.DB) *IPLDFetcher { - return &IPLDFetcher{ - db: db, - } -} - -// Fetch is the exported method for fetching and returning all the IPLDS specified in the CIDWrapper -func (f *IPLDFetcher) Fetch(cids shared.CIDsForFetching) (shared.IPLDs, error) { - cidWrapper, ok := cids.(*CIDWrapper) - if !ok { - return nil, fmt.Errorf("btc fetcher: expected cids type %T got %T", &CIDWrapper{}, cids) - } - log.Debug("fetching iplds") - iplds := IPLDs{} - iplds.BlockNumber = cidWrapper.BlockNumber - - tx, err := f.db.Beginx() - if err != nil { - return nil, err - } - defer func() { - if p := recover(); p != nil { - shared.Rollback(tx) - panic(p) - } else if err != nil { - shared.Rollback(tx) - } else { - err = tx.Commit() - } - }() - - iplds.Header, err = f.FetchHeader(tx, cidWrapper.Header) - if err != nil { - return nil, fmt.Errorf("btc pg fetcher: header fetching error: %s", err.Error()) - } - iplds.Transactions, err = f.FetchTrxs(tx, cidWrapper.Transactions) - if err != nil { - return nil, fmt.Errorf("btc pg fetcher: transaction fetching error: %s", err.Error()) - } - return iplds, err -} - -// FetchHeaders fetches headers -func (f *IPLDFetcher) FetchHeader(tx *sqlx.Tx, c HeaderModel) (ipfs.BlockModel, error) { - log.Debug("fetching header ipld") - headerBytes, err := shared.FetchIPLDByMhKey(tx, c.MhKey) - if err != nil { - return ipfs.BlockModel{}, err - } - return ipfs.BlockModel{ - Data: headerBytes, - CID: c.CID, - }, nil -} - -// FetchTrxs fetches transactions -func (f *IPLDFetcher) FetchTrxs(tx *sqlx.Tx, cids []TxModel) ([]ipfs.BlockModel, error) { - log.Debug("fetching transaction iplds") - trxIPLDs := make([]ipfs.BlockModel, len(cids)) - for i, c := range cids { - trxBytes, err := shared.FetchIPLDByMhKey(tx, c.MhKey) - if err != nil { - return nil, err - } - trxIPLDs[i] = ipfs.BlockModel{ - Data: trxBytes, - CID: c.CID, - } - } - return trxIPLDs, nil -} diff --git a/pkg/btc/mocks/converter.go b/pkg/btc/mocks/converter.go deleted file mode 100644 index 335f2c6b..00000000 --- a/pkg/btc/mocks/converter.go +++ /dev/null @@ -1,64 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
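The IPLDFetcher above resolves every CID through its multihash key against PG-IPFS rather than through an IPFS daemon. shared.FetchIPLDByMhKey itself is not shown in this diff, but given the public.blocks (key, data) table populated in the removed tests, it presumably reduces to a single keyed lookup along these lines (the query here is an assumption, not the shared package's actual code):

package example

import "github.com/jmoiron/sqlx"

// fetchIPLDByMhKey sketches what the shared helper most likely does, based on
// the public.blocks (key, data) schema used elsewhere in the removed tests.
func fetchIPLDByMhKey(tx *sqlx.Tx, mhKey string) ([]byte, error) {
	var data []byte
	err := tx.Get(&data, `SELECT data FROM public.blocks WHERE key = $1`, mhKey)
	return data, err
}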
- -package mocks - -import ( - "fmt" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/btc" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -// PayloadConverter is the underlying struct for the Converter interface -type PayloadConverter struct { - PassedStatediffPayload btc.BlockPayload - ReturnIPLDPayload btc.ConvertedPayload - ReturnErr error -} - -// Convert method is used to convert a geth statediff.Payload to a IPLDPayload -func (pc *PayloadConverter) Convert(payload shared.RawChainData) (shared.ConvertedData, error) { - stateDiffPayload, ok := payload.(btc.BlockPayload) - if !ok { - return nil, fmt.Errorf("convert expected payload type %T got %T", btc.BlockPayload{}, payload) - } - pc.PassedStatediffPayload = stateDiffPayload - return pc.ReturnIPLDPayload, pc.ReturnErr -} - -// IterativePayloadConverter is the underlying struct for the Converter interface -type IterativePayloadConverter struct { - PassedStatediffPayload []btc.BlockPayload - ReturnIPLDPayload []btc.ConvertedPayload - ReturnErr error - iteration int -} - -// Convert method is used to convert a geth statediff.Payload to a IPLDPayload -func (pc *IterativePayloadConverter) Convert(payload shared.RawChainData) (shared.ConvertedData, error) { - stateDiffPayload, ok := payload.(btc.BlockPayload) - if !ok { - return nil, fmt.Errorf("convert expected payload type %T got %T", btc.BlockPayload{}, payload) - } - pc.PassedStatediffPayload = append(pc.PassedStatediffPayload, stateDiffPayload) - if len(pc.PassedStatediffPayload) < pc.iteration+1 { - return nil, fmt.Errorf("IterativePayloadConverter does not have a payload to return at iteration %d", pc.iteration) - } - returnPayload := pc.ReturnIPLDPayload[pc.iteration] - pc.iteration++ - return returnPayload, pc.ReturnErr -} diff --git a/pkg/btc/mocks/indexer.go b/pkg/btc/mocks/indexer.go deleted file mode 100644 index 14b51677..00000000 --- a/pkg/btc/mocks/indexer.go +++ /dev/null @@ -1,40 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
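The mock converter above follows the repo's standard test-double shape: record what was passed in, hand back a canned return value, and (in the iterative variant) step through a slice of canned returns. A rough sketch of how a removed spec would wire it, using the package's own mock fixtures:

package example

import "github.com/vulcanize/ipfs-blockchain-watcher/pkg/btc/mocks"

// buildMockConverter wires the double: canned output in, captured input
// available for inspection after the code under test runs.
func buildMockConverter() *mocks.PayloadConverter {
	return &mocks.PayloadConverter{
		ReturnIPLDPayload: mocks.MockConvertedPayload, // canned conversion result
		ReturnErr:         nil,
	}
}

// A spec would then assert on the captured input, e.g.:
//   Expect(converter.PassedStatediffPayload).To(Equal(mocks.MockBlockPayload))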
- -package mocks - -import ( - "fmt" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/btc" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -// CIDIndexer is the underlying struct for the Indexer interface -type CIDIndexer struct { - PassedCIDPayload []*btc.CIDPayload - ReturnErr error -} - -// Index indexes a cidPayload in Postgres -func (repo *CIDIndexer) Index(cids shared.CIDsForIndexing) error { - cidPayload, ok := cids.(*btc.CIDPayload) - if !ok { - return fmt.Errorf("index expected cids type %T got %T", &btc.CIDPayload{}, cids) - } - repo.PassedCIDPayload = append(repo.PassedCIDPayload, cidPayload) - return repo.ReturnErr -} diff --git a/pkg/btc/mocks/publisher.go b/pkg/btc/mocks/publisher.go deleted file mode 100644 index 11d1e27d..00000000 --- a/pkg/btc/mocks/publisher.go +++ /dev/null @@ -1,65 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package mocks - -import ( - "fmt" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/btc" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -// IPLDPublisher is the underlying struct for the Publisher interface -type IPLDPublisher struct { - PassedIPLDPayload btc.ConvertedPayload - ReturnCIDPayload *btc.CIDPayload - ReturnErr error -} - -// Publish publishes an IPLDPayload to IPFS and returns the corresponding CIDPayload -func (pub *IPLDPublisher) Publish(payload shared.ConvertedData) (shared.CIDsForIndexing, error) { - ipldPayload, ok := payload.(btc.ConvertedPayload) - if !ok { - return nil, fmt.Errorf("publish expected payload type %T got %T", &btc.ConvertedPayload{}, payload) - } - pub.PassedIPLDPayload = ipldPayload - return pub.ReturnCIDPayload, pub.ReturnErr -} - -// IterativeIPLDPublisher is the underlying struct for the Publisher interface; used in testing -type IterativeIPLDPublisher struct { - PassedIPLDPayload []btc.ConvertedPayload - ReturnCIDPayload []*btc.CIDPayload - ReturnErr error - iteration int -} - -// Publish publishes an IPLDPayload to IPFS and returns the corresponding CIDPayload -func (pub *IterativeIPLDPublisher) Publish(payload shared.ConvertedData) (shared.CIDsForIndexing, error) { - ipldPayload, ok := payload.(btc.ConvertedPayload) - if !ok { - return nil, fmt.Errorf("publish expected payload type %T got %T", &btc.ConvertedPayload{}, payload) - } - pub.PassedIPLDPayload = append(pub.PassedIPLDPayload, ipldPayload) - if len(pub.ReturnCIDPayload) < pub.iteration+1 { - return nil, fmt.Errorf("IterativeIPLDPublisher does not have a payload to return at iteration %d", pub.iteration) - } - returnPayload := pub.ReturnCIDPayload[pub.iteration] - pub.iteration++ - return returnPayload, pub.ReturnErr -} diff --git a/pkg/btc/mocks/test_data.go b/pkg/btc/mocks/test_data.go deleted file mode 100644 index 7fc347ea..00000000 --- a/pkg/btc/mocks/test_data.go +++ /dev/null @@ -1,709 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This 
program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package mocks - -import ( - "strconv" - "time" - - "github.com/btcsuite/btcd/chaincfg" - "github.com/btcsuite/btcd/chaincfg/chainhash" - "github.com/btcsuite/btcd/txscript" - "github.com/btcsuite/btcd/wire" - "github.com/btcsuite/btcutil" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/btc" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -var ( - MockHeaderCID = shared.TestCID([]byte("MockHeaderCID")) - MockTrxCID1 = shared.TestCID([]byte("MockTrxCID1")) - MockTrxCID2 = shared.TestCID([]byte("MockTrxCID2")) - MockTrxCID3 = shared.TestCID([]byte("MockTrxCID3")) - MockHeaderMhKey = shared.MultihashKeyFromCID(MockHeaderCID) - MockTrxMhKey1 = shared.MultihashKeyFromCID(MockTrxCID1) - MockTrxMhKey2 = shared.MultihashKeyFromCID(MockTrxCID2) - MockTrxMhKey3 = shared.MultihashKeyFromCID(MockTrxCID3) - MockBlockHeight int64 = 1337 - MockBlock = wire.MsgBlock{ - Header: wire.BlockHeader{ - Version: 1, - PrevBlock: chainhash.Hash([32]byte{ // Make go vet happy. - 0x50, 0x12, 0x01, 0x19, 0x17, 0x2a, 0x61, 0x04, - 0x21, 0xa6, 0xc3, 0x01, 0x1d, 0xd3, 0x30, 0xd9, - 0xdf, 0x07, 0xb6, 0x36, 0x16, 0xc2, 0xcc, 0x1f, - 0x1c, 0xd0, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, - }), // 000000000002d01c1fccc21636b607dfd930d31d01c3a62104612a1719011250 - MerkleRoot: chainhash.Hash([32]byte{ // Make go vet happy. - 0x66, 0x57, 0xa9, 0x25, 0x2a, 0xac, 0xd5, 0xc0, - 0xb2, 0x94, 0x09, 0x96, 0xec, 0xff, 0x95, 0x22, - 0x28, 0xc3, 0x06, 0x7c, 0xc3, 0x8d, 0x48, 0x85, - 0xef, 0xb5, 0xa4, 0xac, 0x42, 0x47, 0xe9, 0xf3, - }), // f3e94742aca4b5ef85488dc37c06c3282295ffec960994b2c0d5ac2a25a95766 - Timestamp: time.Unix(1293623863, 0), // 2010-12-29 11:57:43 +0000 UTC - Bits: 0x1b04864c, // 453281356 - Nonce: 0x10572b0f, // 274148111 - }, - Transactions: []*wire.MsgTx{ - { - Version: 1, - TxIn: []*wire.TxIn{ - { - PreviousOutPoint: wire.OutPoint{ - Hash: chainhash.Hash{}, - Index: 0xffffffff, - }, - SignatureScript: []byte{ - 0x04, 0x4c, 0x86, 0x04, 0x1b, 0x02, 0x06, 0x02, - }, - Sequence: 0xffffffff, - }, - }, - TxOut: []*wire.TxOut{ - { - Value: 0x12a05f200, // 5000000000 - PkScript: []byte{ - 0x41, // OP_DATA_65 - 0x04, 0x1b, 0x0e, 0x8c, 0x25, 0x67, 0xc1, 0x25, - 0x36, 0xaa, 0x13, 0x35, 0x7b, 0x79, 0xa0, 0x73, - 0xdc, 0x44, 0x44, 0xac, 0xb8, 0x3c, 0x4e, 0xc7, - 0xa0, 0xe2, 0xf9, 0x9d, 0xd7, 0x45, 0x75, 0x16, - 0xc5, 0x81, 0x72, 0x42, 0xda, 0x79, 0x69, 0x24, - 0xca, 0x4e, 0x99, 0x94, 0x7d, 0x08, 0x7f, 0xed, - 0xf9, 0xce, 0x46, 0x7c, 0xb9, 0xf7, 0xc6, 0x28, - 0x70, 0x78, 0xf8, 0x01, 0xdf, 0x27, 0x6f, 0xdf, - 0x84, // 65-byte signature - 0xac, // OP_CHECKSIG - }, - }, - }, - LockTime: 0, - }, - { - Version: 1, - TxIn: []*wire.TxIn{ - { - PreviousOutPoint: wire.OutPoint{ - Hash: chainhash.Hash([32]byte{ // Make go vet happy. 
- 0x03, 0x2e, 0x38, 0xe9, 0xc0, 0xa8, 0x4c, 0x60, - 0x46, 0xd6, 0x87, 0xd1, 0x05, 0x56, 0xdc, 0xac, - 0xc4, 0x1d, 0x27, 0x5e, 0xc5, 0x5f, 0xc0, 0x07, - 0x79, 0xac, 0x88, 0xfd, 0xf3, 0x57, 0xa1, 0x87, - }), // 87a157f3fd88ac7907c05fc55e271dc4acdc5605d187d646604ca8c0e9382e03 - Index: 0, - }, - SignatureScript: []byte{ - 0x49, // OP_DATA_73 - 0x30, 0x46, 0x02, 0x21, 0x00, 0xc3, 0x52, 0xd3, - 0xdd, 0x99, 0x3a, 0x98, 0x1b, 0xeb, 0xa4, 0xa6, - 0x3a, 0xd1, 0x5c, 0x20, 0x92, 0x75, 0xca, 0x94, - 0x70, 0xab, 0xfc, 0xd5, 0x7d, 0xa9, 0x3b, 0x58, - 0xe4, 0xeb, 0x5d, 0xce, 0x82, 0x02, 0x21, 0x00, - 0x84, 0x07, 0x92, 0xbc, 0x1f, 0x45, 0x60, 0x62, - 0x81, 0x9f, 0x15, 0xd3, 0x3e, 0xe7, 0x05, 0x5c, - 0xf7, 0xb5, 0xee, 0x1a, 0xf1, 0xeb, 0xcc, 0x60, - 0x28, 0xd9, 0xcd, 0xb1, 0xc3, 0xaf, 0x77, 0x48, - 0x01, // 73-byte signature - 0x41, // OP_DATA_65 - 0x04, 0xf4, 0x6d, 0xb5, 0xe9, 0xd6, 0x1a, 0x9d, - 0xc2, 0x7b, 0x8d, 0x64, 0xad, 0x23, 0xe7, 0x38, - 0x3a, 0x4e, 0x6c, 0xa1, 0x64, 0x59, 0x3c, 0x25, - 0x27, 0xc0, 0x38, 0xc0, 0x85, 0x7e, 0xb6, 0x7e, - 0xe8, 0xe8, 0x25, 0xdc, 0xa6, 0x50, 0x46, 0xb8, - 0x2c, 0x93, 0x31, 0x58, 0x6c, 0x82, 0xe0, 0xfd, - 0x1f, 0x63, 0x3f, 0x25, 0xf8, 0x7c, 0x16, 0x1b, - 0xc6, 0xf8, 0xa6, 0x30, 0x12, 0x1d, 0xf2, 0xb3, - 0xd3, // 65-byte pubkey - }, - Sequence: 0xffffffff, - }, - }, - TxOut: []*wire.TxOut{ - { - Value: 0x2123e300, // 556000000 - PkScript: []byte{ - 0x76, // OP_DUP - 0xa9, // OP_HASH160 - 0x14, // OP_DATA_20 - 0xc3, 0x98, 0xef, 0xa9, 0xc3, 0x92, 0xba, 0x60, - 0x13, 0xc5, 0xe0, 0x4e, 0xe7, 0x29, 0x75, 0x5e, - 0xf7, 0xf5, 0x8b, 0x32, - 0x88, // OP_EQUALVERIFY - 0xac, // OP_CHECKSIG - }, - }, - { - Value: 0x108e20f00, // 4444000000 - PkScript: []byte{ - 0x76, // OP_DUP - 0xa9, // OP_HASH160 - 0x14, // OP_DATA_20 - 0x94, 0x8c, 0x76, 0x5a, 0x69, 0x14, 0xd4, 0x3f, - 0x2a, 0x7a, 0xc1, 0x77, 0xda, 0x2c, 0x2f, 0x6b, - 0x52, 0xde, 0x3d, 0x7c, - 0x88, // OP_EQUALVERIFY - 0xac, // OP_CHECKSIG - }, - }, - }, - LockTime: 0, - }, - { - Version: 1, - TxIn: []*wire.TxIn{ - { - PreviousOutPoint: wire.OutPoint{ - Hash: chainhash.Hash([32]byte{ // Make go vet happy. 
- 0xc3, 0x3e, 0xbf, 0xf2, 0xa7, 0x09, 0xf1, 0x3d, - 0x9f, 0x9a, 0x75, 0x69, 0xab, 0x16, 0xa3, 0x27, - 0x86, 0xaf, 0x7d, 0x7e, 0x2d, 0xe0, 0x92, 0x65, - 0xe4, 0x1c, 0x61, 0xd0, 0x78, 0x29, 0x4e, 0xcf, - }), // cf4e2978d0611ce46592e02d7e7daf8627a316ab69759a9f3df109a7f2bf3ec3 - Index: 1, - }, - SignatureScript: []byte{ - 0x47, // OP_DATA_71 - 0x30, 0x44, 0x02, 0x20, 0x03, 0x2d, 0x30, 0xdf, - 0x5e, 0xe6, 0xf5, 0x7f, 0xa4, 0x6c, 0xdd, 0xb5, - 0xeb, 0x8d, 0x0d, 0x9f, 0xe8, 0xde, 0x6b, 0x34, - 0x2d, 0x27, 0x94, 0x2a, 0xe9, 0x0a, 0x32, 0x31, - 0xe0, 0xba, 0x33, 0x3e, 0x02, 0x20, 0x3d, 0xee, - 0xe8, 0x06, 0x0f, 0xdc, 0x70, 0x23, 0x0a, 0x7f, - 0x5b, 0x4a, 0xd7, 0xd7, 0xbc, 0x3e, 0x62, 0x8c, - 0xbe, 0x21, 0x9a, 0x88, 0x6b, 0x84, 0x26, 0x9e, - 0xae, 0xb8, 0x1e, 0x26, 0xb4, 0xfe, 0x01, - 0x41, // OP_DATA_65 - 0x04, 0xae, 0x31, 0xc3, 0x1b, 0xf9, 0x12, 0x78, - 0xd9, 0x9b, 0x83, 0x77, 0xa3, 0x5b, 0xbc, 0xe5, - 0xb2, 0x7d, 0x9f, 0xff, 0x15, 0x45, 0x68, 0x39, - 0xe9, 0x19, 0x45, 0x3f, 0xc7, 0xb3, 0xf7, 0x21, - 0xf0, 0xba, 0x40, 0x3f, 0xf9, 0x6c, 0x9d, 0xee, - 0xb6, 0x80, 0xe5, 0xfd, 0x34, 0x1c, 0x0f, 0xc3, - 0xa7, 0xb9, 0x0d, 0xa4, 0x63, 0x1e, 0xe3, 0x95, - 0x60, 0x63, 0x9d, 0xb4, 0x62, 0xe9, 0xcb, 0x85, - 0x0f, // 65-byte pubkey - }, - Sequence: 0xffffffff, - }, - }, - TxOut: []*wire.TxOut{ - { - Value: 0xf4240, // 1000000 - PkScript: []byte{ - 0x76, // OP_DUP - 0xa9, // OP_HASH160 - 0x14, // OP_DATA_20 - 0xb0, 0xdc, 0xbf, 0x97, 0xea, 0xbf, 0x44, 0x04, - 0xe3, 0x1d, 0x95, 0x24, 0x77, 0xce, 0x82, 0x2d, - 0xad, 0xbe, 0x7e, 0x10, - 0x88, // OP_EQUALVERIFY - 0xac, // OP_CHECKSIG - }, - }, - { - Value: 0x11d260c0, // 299000000 - PkScript: []byte{ - 0x76, // OP_DUP - 0xa9, // OP_HASH160 - 0x14, // OP_DATA_20 - 0x6b, 0x12, 0x81, 0xee, 0xc2, 0x5a, 0xb4, 0xe1, - 0xe0, 0x79, 0x3f, 0xf4, 0xe0, 0x8a, 0xb1, 0xab, - 0xb3, 0x40, 0x9c, 0xd9, - 0x88, // OP_EQUALVERIFY - 0xac, // OP_CHECKSIG - }, - }, - }, - LockTime: 0, - }, - }, - } - MockTransactions = []*btcutil.Tx{ - btcutil.NewTx(MockBlock.Transactions[0]), - btcutil.NewTx(MockBlock.Transactions[1]), - btcutil.NewTx(MockBlock.Transactions[2]), - } - MockBlockPayload = btc.BlockPayload{ - Header: &MockBlock.Header, - Txs: MockTransactions, - BlockHeight: MockBlockHeight, - } - sClass1, addresses1, numOfSigs1, _ = txscript.ExtractPkScriptAddrs([]byte{ - 0x41, // OP_DATA_65 - 0x04, 0x1b, 0x0e, 0x8c, 0x25, 0x67, 0xc1, 0x25, - 0x36, 0xaa, 0x13, 0x35, 0x7b, 0x79, 0xa0, 0x73, - 0xdc, 0x44, 0x44, 0xac, 0xb8, 0x3c, 0x4e, 0xc7, - 0xa0, 0xe2, 0xf9, 0x9d, 0xd7, 0x45, 0x75, 0x16, - 0xc5, 0x81, 0x72, 0x42, 0xda, 0x79, 0x69, 0x24, - 0xca, 0x4e, 0x99, 0x94, 0x7d, 0x08, 0x7f, 0xed, - 0xf9, 0xce, 0x46, 0x7c, 0xb9, 0xf7, 0xc6, 0x28, - 0x70, 0x78, 0xf8, 0x01, 0xdf, 0x27, 0x6f, 0xdf, - 0x84, // 65-byte signature - 0xac, // OP_CHECKSIG - }, &chaincfg.MainNetParams) - sClass2a, addresses2a, numOfSigs2a, _ = txscript.ExtractPkScriptAddrs([]byte{ - 0x76, // OP_DUP - 0xa9, // OP_HASH160 - 0x14, // OP_DATA_20 - 0xc3, 0x98, 0xef, 0xa9, 0xc3, 0x92, 0xba, 0x60, - 0x13, 0xc5, 0xe0, 0x4e, 0xe7, 0x29, 0x75, 0x5e, - 0xf7, 0xf5, 0x8b, 0x32, - 0x88, // OP_EQUALVERIFY - 0xac, // OP_CHECKSIG - }, &chaincfg.MainNetParams) - sClass2b, addresses2b, numOfSigs2b, _ = txscript.ExtractPkScriptAddrs([]byte{ - 0x76, // OP_DUP - 0xa9, // OP_HASH160 - 0x14, // OP_DATA_20 - 0x94, 0x8c, 0x76, 0x5a, 0x69, 0x14, 0xd4, 0x3f, - 0x2a, 0x7a, 0xc1, 0x77, 0xda, 0x2c, 0x2f, 0x6b, - 0x52, 0xde, 0x3d, 0x7c, - 0x88, // OP_EQUALVERIFY - 0xac, // OP_CHECKSIG - }, &chaincfg.MainNetParams) - sClass3a, addresses3a, numOfSigs3a, _ = 
txscript.ExtractPkScriptAddrs([]byte{ - 0x76, // OP_DUP - 0xa9, // OP_HASH160 - 0x14, // OP_DATA_20 - 0xb0, 0xdc, 0xbf, 0x97, 0xea, 0xbf, 0x44, 0x04, - 0xe3, 0x1d, 0x95, 0x24, 0x77, 0xce, 0x82, 0x2d, - 0xad, 0xbe, 0x7e, 0x10, - 0x88, // OP_EQUALVERIFY - 0xac, // OP_CHECKSIG - }, &chaincfg.MainNetParams) - sClass3b, addresses3b, numOfSigs3b, _ = txscript.ExtractPkScriptAddrs([]byte{ - 0x76, // OP_DUP - 0xa9, // OP_HASH160 - 0x14, // OP_DATA_20 - 0x6b, 0x12, 0x81, 0xee, 0xc2, 0x5a, 0xb4, 0xe1, - 0xe0, 0x79, 0x3f, 0xf4, 0xe0, 0x8a, 0xb1, 0xab, - 0xb3, 0x40, 0x9c, 0xd9, - 0x88, // OP_EQUALVERIFY - 0xac, // OP_CHECKSIG - }, &chaincfg.MainNetParams) - MockTxsMetaData = []btc.TxModelWithInsAndOuts{ - { - TxHash: MockBlock.Transactions[0].TxHash().String(), - Index: 0, - SegWit: MockBlock.Transactions[0].HasWitness(), - TxInputs: []btc.TxInput{ - { - Index: 0, - SignatureScript: []byte{ - 0x04, 0x4c, 0x86, 0x04, 0x1b, 0x02, 0x06, 0x02, - }, - PreviousOutPointHash: chainhash.Hash{}.String(), - PreviousOutPointIndex: 0xffffffff, - }, - }, - TxOutputs: []btc.TxOutput{ - { - Value: 5000000000, - Index: 0, - PkScript: []byte{ - 0x41, // OP_DATA_65 - 0x04, 0x1b, 0x0e, 0x8c, 0x25, 0x67, 0xc1, 0x25, - 0x36, 0xaa, 0x13, 0x35, 0x7b, 0x79, 0xa0, 0x73, - 0xdc, 0x44, 0x44, 0xac, 0xb8, 0x3c, 0x4e, 0xc7, - 0xa0, 0xe2, 0xf9, 0x9d, 0xd7, 0x45, 0x75, 0x16, - 0xc5, 0x81, 0x72, 0x42, 0xda, 0x79, 0x69, 0x24, - 0xca, 0x4e, 0x99, 0x94, 0x7d, 0x08, 0x7f, 0xed, - 0xf9, 0xce, 0x46, 0x7c, 0xb9, 0xf7, 0xc6, 0x28, - 0x70, 0x78, 0xf8, 0x01, 0xdf, 0x27, 0x6f, 0xdf, - 0x84, // 65-byte signature - 0xac, // OP_CHECKSIG - }, - ScriptClass: uint8(sClass1), - RequiredSigs: int64(numOfSigs1), - Addresses: stringSliceFromAddresses(addresses1), - }, - }, - }, - { - TxHash: MockBlock.Transactions[1].TxHash().String(), - Index: 1, - SegWit: MockBlock.Transactions[1].HasWitness(), - TxInputs: []btc.TxInput{ - { - Index: 0, - PreviousOutPointHash: chainhash.Hash([32]byte{ // Make go vet happy. 
- 0x03, 0x2e, 0x38, 0xe9, 0xc0, 0xa8, 0x4c, 0x60, - 0x46, 0xd6, 0x87, 0xd1, 0x05, 0x56, 0xdc, 0xac, - 0xc4, 0x1d, 0x27, 0x5e, 0xc5, 0x5f, 0xc0, 0x07, - 0x79, 0xac, 0x88, 0xfd, 0xf3, 0x57, 0xa1, 0x87, - }).String(), - PreviousOutPointIndex: 0, - SignatureScript: []byte{ - 0x49, // OP_DATA_73 - 0x30, 0x46, 0x02, 0x21, 0x00, 0xc3, 0x52, 0xd3, - 0xdd, 0x99, 0x3a, 0x98, 0x1b, 0xeb, 0xa4, 0xa6, - 0x3a, 0xd1, 0x5c, 0x20, 0x92, 0x75, 0xca, 0x94, - 0x70, 0xab, 0xfc, 0xd5, 0x7d, 0xa9, 0x3b, 0x58, - 0xe4, 0xeb, 0x5d, 0xce, 0x82, 0x02, 0x21, 0x00, - 0x84, 0x07, 0x92, 0xbc, 0x1f, 0x45, 0x60, 0x62, - 0x81, 0x9f, 0x15, 0xd3, 0x3e, 0xe7, 0x05, 0x5c, - 0xf7, 0xb5, 0xee, 0x1a, 0xf1, 0xeb, 0xcc, 0x60, - 0x28, 0xd9, 0xcd, 0xb1, 0xc3, 0xaf, 0x77, 0x48, - 0x01, // 73-byte signature - 0x41, // OP_DATA_65 - 0x04, 0xf4, 0x6d, 0xb5, 0xe9, 0xd6, 0x1a, 0x9d, - 0xc2, 0x7b, 0x8d, 0x64, 0xad, 0x23, 0xe7, 0x38, - 0x3a, 0x4e, 0x6c, 0xa1, 0x64, 0x59, 0x3c, 0x25, - 0x27, 0xc0, 0x38, 0xc0, 0x85, 0x7e, 0xb6, 0x7e, - 0xe8, 0xe8, 0x25, 0xdc, 0xa6, 0x50, 0x46, 0xb8, - 0x2c, 0x93, 0x31, 0x58, 0x6c, 0x82, 0xe0, 0xfd, - 0x1f, 0x63, 0x3f, 0x25, 0xf8, 0x7c, 0x16, 0x1b, - 0xc6, 0xf8, 0xa6, 0x30, 0x12, 0x1d, 0xf2, 0xb3, - 0xd3, // 65-byte pubkey - }, - }, - }, - TxOutputs: []btc.TxOutput{ - { - Index: 0, - Value: 556000000, - PkScript: []byte{ - 0x76, // OP_DUP - 0xa9, // OP_HASH160 - 0x14, // OP_DATA_20 - 0xc3, 0x98, 0xef, 0xa9, 0xc3, 0x92, 0xba, 0x60, - 0x13, 0xc5, 0xe0, 0x4e, 0xe7, 0x29, 0x75, 0x5e, - 0xf7, 0xf5, 0x8b, 0x32, - 0x88, // OP_EQUALVERIFY - 0xac, // OP_CHECKSIG - }, - ScriptClass: uint8(sClass2a), - RequiredSigs: int64(numOfSigs2a), - Addresses: stringSliceFromAddresses(addresses2a), - }, - { - Index: 1, - Value: 4444000000, - PkScript: []byte{ - 0x76, // OP_DUP - 0xa9, // OP_HASH160 - 0x14, // OP_DATA_20 - 0x94, 0x8c, 0x76, 0x5a, 0x69, 0x14, 0xd4, 0x3f, - 0x2a, 0x7a, 0xc1, 0x77, 0xda, 0x2c, 0x2f, 0x6b, - 0x52, 0xde, 0x3d, 0x7c, - 0x88, // OP_EQUALVERIFY - 0xac, // OP_CHECKSIG - }, - ScriptClass: uint8(sClass2b), - RequiredSigs: int64(numOfSigs2b), - Addresses: stringSliceFromAddresses(addresses2b), - }, - }, - }, - { - TxHash: MockBlock.Transactions[2].TxHash().String(), - Index: 2, - SegWit: MockBlock.Transactions[2].HasWitness(), - TxInputs: []btc.TxInput{ - { - Index: 0, - PreviousOutPointHash: chainhash.Hash([32]byte{ // Make go vet happy. 
- 0xc3, 0x3e, 0xbf, 0xf2, 0xa7, 0x09, 0xf1, 0x3d, - 0x9f, 0x9a, 0x75, 0x69, 0xab, 0x16, 0xa3, 0x27, - 0x86, 0xaf, 0x7d, 0x7e, 0x2d, 0xe0, 0x92, 0x65, - 0xe4, 0x1c, 0x61, 0xd0, 0x78, 0x29, 0x4e, 0xcf, - }).String(), - PreviousOutPointIndex: 1, - SignatureScript: []byte{ - 0x47, // OP_DATA_71 - 0x30, 0x44, 0x02, 0x20, 0x03, 0x2d, 0x30, 0xdf, - 0x5e, 0xe6, 0xf5, 0x7f, 0xa4, 0x6c, 0xdd, 0xb5, - 0xeb, 0x8d, 0x0d, 0x9f, 0xe8, 0xde, 0x6b, 0x34, - 0x2d, 0x27, 0x94, 0x2a, 0xe9, 0x0a, 0x32, 0x31, - 0xe0, 0xba, 0x33, 0x3e, 0x02, 0x20, 0x3d, 0xee, - 0xe8, 0x06, 0x0f, 0xdc, 0x70, 0x23, 0x0a, 0x7f, - 0x5b, 0x4a, 0xd7, 0xd7, 0xbc, 0x3e, 0x62, 0x8c, - 0xbe, 0x21, 0x9a, 0x88, 0x6b, 0x84, 0x26, 0x9e, - 0xae, 0xb8, 0x1e, 0x26, 0xb4, 0xfe, 0x01, - 0x41, // OP_DATA_65 - 0x04, 0xae, 0x31, 0xc3, 0x1b, 0xf9, 0x12, 0x78, - 0xd9, 0x9b, 0x83, 0x77, 0xa3, 0x5b, 0xbc, 0xe5, - 0xb2, 0x7d, 0x9f, 0xff, 0x15, 0x45, 0x68, 0x39, - 0xe9, 0x19, 0x45, 0x3f, 0xc7, 0xb3, 0xf7, 0x21, - 0xf0, 0xba, 0x40, 0x3f, 0xf9, 0x6c, 0x9d, 0xee, - 0xb6, 0x80, 0xe5, 0xfd, 0x34, 0x1c, 0x0f, 0xc3, - 0xa7, 0xb9, 0x0d, 0xa4, 0x63, 0x1e, 0xe3, 0x95, - 0x60, 0x63, 0x9d, 0xb4, 0x62, 0xe9, 0xcb, 0x85, - 0x0f, // 65-byte pubkey - }, - }, - }, - TxOutputs: []btc.TxOutput{ - { - Index: 0, - Value: 1000000, - PkScript: []byte{ - 0x76, // OP_DUP - 0xa9, // OP_HASH160 - 0x14, // OP_DATA_20 - 0xb0, 0xdc, 0xbf, 0x97, 0xea, 0xbf, 0x44, 0x04, - 0xe3, 0x1d, 0x95, 0x24, 0x77, 0xce, 0x82, 0x2d, - 0xad, 0xbe, 0x7e, 0x10, - 0x88, // OP_EQUALVERIFY - 0xac, // OP_CHECKSIG - }, - ScriptClass: uint8(sClass3a), - RequiredSigs: int64(numOfSigs3a), - Addresses: stringSliceFromAddresses(addresses3a), - }, - { - Index: 1, - Value: 299000000, - PkScript: []byte{ - 0x76, // OP_DUP - 0xa9, // OP_HASH160 - 0x14, // OP_DATA_20 - 0x6b, 0x12, 0x81, 0xee, 0xc2, 0x5a, 0xb4, 0xe1, - 0xe0, 0x79, 0x3f, 0xf4, 0xe0, 0x8a, 0xb1, 0xab, - 0xb3, 0x40, 0x9c, 0xd9, - 0x88, // OP_EQUALVERIFY - 0xac, // OP_CHECKSIG - }, - ScriptClass: uint8(sClass3b), - RequiredSigs: int64(numOfSigs3b), - Addresses: stringSliceFromAddresses(addresses3b), - }, - }, - }, - } - MockTxsMetaDataPostPublish = []btc.TxModelWithInsAndOuts{ - { - CID: MockTrxCID1.String(), - MhKey: MockTrxMhKey1, - TxHash: MockBlock.Transactions[0].TxHash().String(), - Index: 0, - SegWit: MockBlock.Transactions[0].HasWitness(), - TxInputs: []btc.TxInput{ - { - Index: 0, - SignatureScript: []byte{ - 0x04, 0x4c, 0x86, 0x04, 0x1b, 0x02, 0x06, 0x02, - }, - PreviousOutPointHash: chainhash.Hash{}.String(), - PreviousOutPointIndex: 0xffffffff, - }, - }, - TxOutputs: []btc.TxOutput{ - { - Value: 5000000000, - Index: 0, - PkScript: []byte{ - 0x41, // OP_DATA_65 - 0x04, 0x1b, 0x0e, 0x8c, 0x25, 0x67, 0xc1, 0x25, - 0x36, 0xaa, 0x13, 0x35, 0x7b, 0x79, 0xa0, 0x73, - 0xdc, 0x44, 0x44, 0xac, 0xb8, 0x3c, 0x4e, 0xc7, - 0xa0, 0xe2, 0xf9, 0x9d, 0xd7, 0x45, 0x75, 0x16, - 0xc5, 0x81, 0x72, 0x42, 0xda, 0x79, 0x69, 0x24, - 0xca, 0x4e, 0x99, 0x94, 0x7d, 0x08, 0x7f, 0xed, - 0xf9, 0xce, 0x46, 0x7c, 0xb9, 0xf7, 0xc6, 0x28, - 0x70, 0x78, 0xf8, 0x01, 0xdf, 0x27, 0x6f, 0xdf, - 0x84, // 65-byte signature - 0xac, // OP_CHECKSIG - }, - ScriptClass: uint8(sClass1), - RequiredSigs: int64(numOfSigs1), - Addresses: stringSliceFromAddresses(addresses1), - }, - }, - }, - { - CID: MockTrxCID2.String(), - MhKey: MockTrxMhKey2, - TxHash: MockBlock.Transactions[1].TxHash().String(), - Index: 1, - SegWit: MockBlock.Transactions[1].HasWitness(), - TxInputs: []btc.TxInput{ - { - Index: 0, - PreviousOutPointHash: chainhash.Hash([32]byte{ // Make go vet happy. 
- 0x03, 0x2e, 0x38, 0xe9, 0xc0, 0xa8, 0x4c, 0x60, - 0x46, 0xd6, 0x87, 0xd1, 0x05, 0x56, 0xdc, 0xac, - 0xc4, 0x1d, 0x27, 0x5e, 0xc5, 0x5f, 0xc0, 0x07, - 0x79, 0xac, 0x88, 0xfd, 0xf3, 0x57, 0xa1, 0x87, - }).String(), - PreviousOutPointIndex: 0, - SignatureScript: []byte{ - 0x49, // OP_DATA_73 - 0x30, 0x46, 0x02, 0x21, 0x00, 0xc3, 0x52, 0xd3, - 0xdd, 0x99, 0x3a, 0x98, 0x1b, 0xeb, 0xa4, 0xa6, - 0x3a, 0xd1, 0x5c, 0x20, 0x92, 0x75, 0xca, 0x94, - 0x70, 0xab, 0xfc, 0xd5, 0x7d, 0xa9, 0x3b, 0x58, - 0xe4, 0xeb, 0x5d, 0xce, 0x82, 0x02, 0x21, 0x00, - 0x84, 0x07, 0x92, 0xbc, 0x1f, 0x45, 0x60, 0x62, - 0x81, 0x9f, 0x15, 0xd3, 0x3e, 0xe7, 0x05, 0x5c, - 0xf7, 0xb5, 0xee, 0x1a, 0xf1, 0xeb, 0xcc, 0x60, - 0x28, 0xd9, 0xcd, 0xb1, 0xc3, 0xaf, 0x77, 0x48, - 0x01, // 73-byte signature - 0x41, // OP_DATA_65 - 0x04, 0xf4, 0x6d, 0xb5, 0xe9, 0xd6, 0x1a, 0x9d, - 0xc2, 0x7b, 0x8d, 0x64, 0xad, 0x23, 0xe7, 0x38, - 0x3a, 0x4e, 0x6c, 0xa1, 0x64, 0x59, 0x3c, 0x25, - 0x27, 0xc0, 0x38, 0xc0, 0x85, 0x7e, 0xb6, 0x7e, - 0xe8, 0xe8, 0x25, 0xdc, 0xa6, 0x50, 0x46, 0xb8, - 0x2c, 0x93, 0x31, 0x58, 0x6c, 0x82, 0xe0, 0xfd, - 0x1f, 0x63, 0x3f, 0x25, 0xf8, 0x7c, 0x16, 0x1b, - 0xc6, 0xf8, 0xa6, 0x30, 0x12, 0x1d, 0xf2, 0xb3, - 0xd3, // 65-byte pubkey - }, - }, - }, - TxOutputs: []btc.TxOutput{ - { - Index: 0, - Value: 556000000, - PkScript: []byte{ - 0x76, // OP_DUP - 0xa9, // OP_HASH160 - 0x14, // OP_DATA_20 - 0xc3, 0x98, 0xef, 0xa9, 0xc3, 0x92, 0xba, 0x60, - 0x13, 0xc5, 0xe0, 0x4e, 0xe7, 0x29, 0x75, 0x5e, - 0xf7, 0xf5, 0x8b, 0x32, - 0x88, // OP_EQUALVERIFY - 0xac, // OP_CHECKSIG - }, - ScriptClass: uint8(sClass2a), - RequiredSigs: int64(numOfSigs2a), - Addresses: stringSliceFromAddresses(addresses2a), - }, - { - Index: 1, - Value: 4444000000, - PkScript: []byte{ - 0x76, // OP_DUP - 0xa9, // OP_HASH160 - 0x14, // OP_DATA_20 - 0x94, 0x8c, 0x76, 0x5a, 0x69, 0x14, 0xd4, 0x3f, - 0x2a, 0x7a, 0xc1, 0x77, 0xda, 0x2c, 0x2f, 0x6b, - 0x52, 0xde, 0x3d, 0x7c, - 0x88, // OP_EQUALVERIFY - 0xac, // OP_CHECKSIG - }, - ScriptClass: uint8(sClass2b), - RequiredSigs: int64(numOfSigs2b), - Addresses: stringSliceFromAddresses(addresses2b), - }, - }, - }, - { - CID: MockTrxCID3.String(), - MhKey: MockTrxMhKey3, - TxHash: MockBlock.Transactions[2].TxHash().String(), - Index: 2, - SegWit: MockBlock.Transactions[2].HasWitness(), - TxInputs: []btc.TxInput{ - { - Index: 0, - PreviousOutPointHash: chainhash.Hash([32]byte{ // Make go vet happy. 
- 0xc3, 0x3e, 0xbf, 0xf2, 0xa7, 0x09, 0xf1, 0x3d, - 0x9f, 0x9a, 0x75, 0x69, 0xab, 0x16, 0xa3, 0x27, - 0x86, 0xaf, 0x7d, 0x7e, 0x2d, 0xe0, 0x92, 0x65, - 0xe4, 0x1c, 0x61, 0xd0, 0x78, 0x29, 0x4e, 0xcf, - }).String(), - PreviousOutPointIndex: 1, - SignatureScript: []byte{ - 0x47, // OP_DATA_71 - 0x30, 0x44, 0x02, 0x20, 0x03, 0x2d, 0x30, 0xdf, - 0x5e, 0xe6, 0xf5, 0x7f, 0xa4, 0x6c, 0xdd, 0xb5, - 0xeb, 0x8d, 0x0d, 0x9f, 0xe8, 0xde, 0x6b, 0x34, - 0x2d, 0x27, 0x94, 0x2a, 0xe9, 0x0a, 0x32, 0x31, - 0xe0, 0xba, 0x33, 0x3e, 0x02, 0x20, 0x3d, 0xee, - 0xe8, 0x06, 0x0f, 0xdc, 0x70, 0x23, 0x0a, 0x7f, - 0x5b, 0x4a, 0xd7, 0xd7, 0xbc, 0x3e, 0x62, 0x8c, - 0xbe, 0x21, 0x9a, 0x88, 0x6b, 0x84, 0x26, 0x9e, - 0xae, 0xb8, 0x1e, 0x26, 0xb4, 0xfe, 0x01, - 0x41, // OP_DATA_65 - 0x04, 0xae, 0x31, 0xc3, 0x1b, 0xf9, 0x12, 0x78, - 0xd9, 0x9b, 0x83, 0x77, 0xa3, 0x5b, 0xbc, 0xe5, - 0xb2, 0x7d, 0x9f, 0xff, 0x15, 0x45, 0x68, 0x39, - 0xe9, 0x19, 0x45, 0x3f, 0xc7, 0xb3, 0xf7, 0x21, - 0xf0, 0xba, 0x40, 0x3f, 0xf9, 0x6c, 0x9d, 0xee, - 0xb6, 0x80, 0xe5, 0xfd, 0x34, 0x1c, 0x0f, 0xc3, - 0xa7, 0xb9, 0x0d, 0xa4, 0x63, 0x1e, 0xe3, 0x95, - 0x60, 0x63, 0x9d, 0xb4, 0x62, 0xe9, 0xcb, 0x85, - 0x0f, // 65-byte pubkey - }, - }, - }, - TxOutputs: []btc.TxOutput{ - { - Index: 0, - Value: 1000000, - PkScript: []byte{ - 0x76, // OP_DUP - 0xa9, // OP_HASH160 - 0x14, // OP_DATA_20 - 0xb0, 0xdc, 0xbf, 0x97, 0xea, 0xbf, 0x44, 0x04, - 0xe3, 0x1d, 0x95, 0x24, 0x77, 0xce, 0x82, 0x2d, - 0xad, 0xbe, 0x7e, 0x10, - 0x88, // OP_EQUALVERIFY - 0xac, // OP_CHECKSIG - }, - ScriptClass: uint8(sClass3a), - RequiredSigs: int64(numOfSigs3a), - Addresses: stringSliceFromAddresses(addresses3a), - }, - { - Index: 1, - Value: 299000000, - PkScript: []byte{ - 0x76, // OP_DUP - 0xa9, // OP_HASH160 - 0x14, // OP_DATA_20 - 0x6b, 0x12, 0x81, 0xee, 0xc2, 0x5a, 0xb4, 0xe1, - 0xe0, 0x79, 0x3f, 0xf4, 0xe0, 0x8a, 0xb1, 0xab, - 0xb3, 0x40, 0x9c, 0xd9, - 0x88, // OP_EQUALVERIFY - 0xac, // OP_CHECKSIG - }, - ScriptClass: uint8(sClass3b), - RequiredSigs: int64(numOfSigs3b), - Addresses: stringSliceFromAddresses(addresses3b), - }, - }, - }, - } - MockHeaderMetaData = btc.HeaderModel{ - CID: MockHeaderCID.String(), - MhKey: MockHeaderMhKey, - ParentHash: MockBlock.Header.PrevBlock.String(), - BlockNumber: strconv.Itoa(int(MockBlockHeight)), - BlockHash: MockBlock.Header.BlockHash().String(), - Timestamp: MockBlock.Header.Timestamp.UnixNano(), - Bits: MockBlock.Header.Bits, - } - MockConvertedPayload = btc.ConvertedPayload{ - BlockPayload: MockBlockPayload, - TxMetaData: MockTxsMetaData, - } - MockCIDPayload = btc.CIDPayload{ - HeaderCID: MockHeaderMetaData, - TransactionCIDs: MockTxsMetaDataPostPublish, - } -) - -func stringSliceFromAddresses(addrs []btcutil.Address) []string { - strs := make([]string, len(addrs)) - for i, addr := range addrs { - strs[i] = addr.EncodeAddress() - } - return strs -} diff --git a/pkg/btc/models.go b/pkg/btc/models.go deleted file mode 100644 index c2bbb81c..00000000 --- a/pkg/btc/models.go +++ /dev/null @@ -1,82 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package btc - -import "github.com/lib/pq" - -// HeaderModel is the db model for btc.header_cids table -type HeaderModel struct { - ID int64 `db:"id"` - BlockNumber string `db:"block_number"` - BlockHash string `db:"block_hash"` - ParentHash string `db:"parent_hash"` - CID string `db:"cid"` - MhKey string `db:"mh_key"` - Timestamp int64 `db:"timestamp"` - Bits uint32 `db:"bits"` - NodeID int64 `db:"node_id"` - TimesValidated int64 `db:"times_validated"` -} - -// TxModel is the db model for btc.transaction_cids table -type TxModel struct { - ID int64 `db:"id"` - HeaderID int64 `db:"header_id"` - Index int64 `db:"index"` - TxHash string `db:"tx_hash"` - CID string `db:"cid"` - MhKey string `db:"mh_key"` - SegWit bool `db:"segwit"` - WitnessHash string `db:"witness_hash"` -} - -// TxModelWithInsAndOuts is the db model for btc.transaction_cids table that includes the children tx_input and tx_output tables -type TxModelWithInsAndOuts struct { - ID int64 `db:"id"` - HeaderID int64 `db:"header_id"` - Index int64 `db:"index"` - TxHash string `db:"tx_hash"` - CID string `db:"cid"` - MhKey string `db:"mh_key"` - SegWit bool `db:"segwit"` - WitnessHash string `db:"witness_hash"` - TxInputs []TxInput - TxOutputs []TxOutput -} - -// TxInput is the db model for btc.tx_inputs table -type TxInput struct { - ID int64 `db:"id"` - TxID int64 `db:"tx_id"` - Index int64 `db:"index"` - TxWitness []string `db:"witness"` - SignatureScript []byte `db:"sig_script"` - PreviousOutPointIndex uint32 `db:"outpoint_tx_hash"` - PreviousOutPointHash string `db:"outpoint_index"` -} - -// TxOutput is the db model for btc.tx_outputs table -type TxOutput struct { - ID int64 `db:"id"` - TxID int64 `db:"tx_id"` - Index int64 `db:"index"` - Value int64 `db:"value"` - PkScript []byte `db:"pk_script"` - ScriptClass uint8 `db:"script_class"` - RequiredSigs int64 `db:"required_sigs"` - Addresses pq.StringArray `db:"addresses"` -} diff --git a/pkg/btc/payload_fetcher.go b/pkg/btc/payload_fetcher.go deleted file mode 100644 index c1e6e4dd..00000000 --- a/pkg/btc/payload_fetcher.go +++ /dev/null @@ -1,76 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package btc - -import ( - "fmt" - - "github.com/btcsuite/btcd/rpcclient" - "github.com/btcsuite/btcd/wire" - "github.com/btcsuite/btcutil" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -// PayloadFetcher satisfies the PayloadFetcher interface for bitcoin -type PayloadFetcher struct { - // PayloadFetcher is thread-safe as long as the underlying client is thread-safe, since it has/modifies no other state - // http.Client is thread-safe - client *rpcclient.Client -} - -// NewStateDiffFetcher returns a PayloadFetcher -func NewPayloadFetcher(c *rpcclient.ConnConfig) (*PayloadFetcher, error) { - client, err := rpcclient.New(c, nil) - if err != nil { - return nil, err - } - return &PayloadFetcher{ - client: client, - }, nil -} - -// FetchAt fetches the block payloads at the given block heights -func (fetcher *PayloadFetcher) FetchAt(blockHeights []uint64) ([]shared.RawChainData, error) { - blockPayloads := make([]shared.RawChainData, len(blockHeights)) - for i, height := range blockHeights { - hash, err := fetcher.client.GetBlockHash(int64(height)) - if err != nil { - return nil, fmt.Errorf("bitcoin PayloadFetcher GetBlockHash err at blockheight %d: %s", height, err.Error()) - } - block, err := fetcher.client.GetBlock(hash) - if err != nil { - return nil, fmt.Errorf("bitcoin PayloadFetcher GetBlock err at blockheight %d: %s", height, err.Error()) - } - blockPayloads[i] = BlockPayload{ - BlockHeight: int64(height), - Header: &block.Header, - Txs: msgTxsToUtilTxs(block.Transactions), - } - } - return blockPayloads, nil -} - -func msgTxsToUtilTxs(msgs []*wire.MsgTx) []*btcutil.Tx { - txs := make([]*btcutil.Tx, len(msgs)) - for i, msg := range msgs { - tx := btcutil.NewTx(msg) - tx.SetIndex(i) - txs[i] = tx - } - return txs -} diff --git a/pkg/btc/publisher.go b/pkg/btc/publisher.go deleted file mode 100644 index 74a3ba56..00000000 --- a/pkg/btc/publisher.go +++ /dev/null @@ -1,120 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
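
The FetchAt loop removed above pairs GetBlockHash with GetBlock from btcd's rpcclient. A standalone sketch of that call pattern follows; the connection settings and block height are placeholders.

package main

import (
	"fmt"
	"log"

	"github.com/btcsuite/btcd/rpcclient"
)

func main() {
	// Placeholder RPC settings; HTTPPostMode selects plain HTTP JSON-RPC,
	// which is what a height-by-height fetcher needs (no websocket notifications).
	connCfg := &rpcclient.ConnConfig{
		Host:         "127.0.0.1:8332",
		User:         "rpcuser",
		Pass:         "rpcpass",
		HTTPPostMode: true,
		DisableTLS:   true,
	}
	client, err := rpcclient.New(connCfg, nil)
	if err != nil {
		log.Fatal(err)
	}
	defer client.Shutdown()

	height := int64(600000) // arbitrary example height
	hash, err := client.GetBlockHash(height)
	if err != nil {
		log.Fatal(err)
	}
	block, err := client.GetBlock(hash)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("block %s at height %d has %d txs\n", hash, height, len(block.Transactions))
}
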
- -package btc - -import ( - "fmt" - "strconv" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs/ipld" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -// IPLDPublisher satisfies the IPLDPublisher interface for bitcoin -// It interfaces directly with the public.blocks table of PG-IPFS rather than going through an ipfs intermediary -// It publishes and indexes IPLDs together in a single sqlx.Tx -type IPLDPublisher struct { - indexer *CIDIndexer -} - -// NewIPLDPublisher creates a pointer to a new eth IPLDPublisher which satisfies the IPLDPublisher interface -func NewIPLDPublisher(db *postgres.DB) *IPLDPublisher { - return &IPLDPublisher{ - indexer: NewCIDIndexer(db), - } -} - -// Publish publishes an IPLDPayload to IPFS and returns the corresponding CIDPayload -func (pub *IPLDPublisher) Publish(payload shared.ConvertedData) error { - ipldPayload, ok := payload.(ConvertedPayload) - if !ok { - return fmt.Errorf("btc publisher expected payload type %T got %T", ConvertedPayload{}, payload) - } - // Generate the iplds - headerNode, txNodes, txTrieNodes, err := ipld.FromHeaderAndTxs(ipldPayload.Header, ipldPayload.Txs) - if err != nil { - return err - } - - // Begin new db tx - tx, err := pub.indexer.db.Beginx() - if err != nil { - return err - } - defer func() { - if p := recover(); p != nil { - shared.Rollback(tx) - panic(p) - } else if err != nil { - shared.Rollback(tx) - } else { - err = tx.Commit() - } - }() - - // Publish trie nodes - for _, node := range txTrieNodes { - if err := shared.PublishIPLD(tx, node); err != nil { - return err - } - } - - // Publish and index header - if err := shared.PublishIPLD(tx, headerNode); err != nil { - return err - } - header := HeaderModel{ - CID: headerNode.Cid().String(), - MhKey: shared.MultihashKeyFromCID(headerNode.Cid()), - ParentHash: ipldPayload.Header.PrevBlock.String(), - BlockNumber: strconv.Itoa(int(ipldPayload.BlockPayload.BlockHeight)), - BlockHash: ipldPayload.Header.BlockHash().String(), - Timestamp: ipldPayload.Header.Timestamp.UnixNano(), - Bits: ipldPayload.Header.Bits, - } - headerID, err := pub.indexer.indexHeaderCID(tx, header) - if err != nil { - return err - } - - // Publish and index txs - for i, txNode := range txNodes { - if err := shared.PublishIPLD(tx, txNode); err != nil { - return err - } - txModel := ipldPayload.TxMetaData[i] - txModel.CID = txNode.Cid().String() - txModel.MhKey = shared.MultihashKeyFromCID(txNode.Cid()) - txID, err := pub.indexer.indexTransactionCID(tx, txModel, headerID) - if err != nil { - return err - } - for _, input := range txModel.TxInputs { - if err := pub.indexer.indexTxInput(tx, input, txID); err != nil { - return err - } - } - for _, output := range txModel.TxOutputs { - if err := pub.indexer.indexTxOutput(tx, output, txID); err != nil { - return err - } - } - } - - return err -} diff --git a/pkg/btc/publisher_test.go b/pkg/btc/publisher_test.go deleted file mode 100644 index b2277b6d..00000000 --- a/pkg/btc/publisher_test.go +++ /dev/null @@ -1,120 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package btc_test - -import ( - "bytes" - - "github.com/ipfs/go-cid" - "github.com/ipfs/go-ipfs-blockstore" - "github.com/ipfs/go-ipfs-ds-help" - "github.com/multiformats/go-multihash" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs/ipld" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/btc" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/btc/mocks" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -var _ = Describe("PublishAndIndexer", func() { - var ( - db *postgres.DB - err error - repo *btc.IPLDPublisher - ipfsPgGet = `SELECT data FROM public.blocks - WHERE key = $1` - ) - BeforeEach(func() { - db, err = shared.SetupDB() - Expect(err).ToNot(HaveOccurred()) - repo = btc.NewIPLDPublisher(db) - }) - AfterEach(func() { - btc.TearDownDB(db) - }) - - Describe("Publish", func() { - It("Published and indexes header and transaction IPLDs in a single tx", func() { - err = repo.Publish(mocks.MockConvertedPayload) - Expect(err).ToNot(HaveOccurred()) - pgStr := `SELECT * FROM btc.header_cids - WHERE block_number = $1` - // check header was properly indexed - buf := bytes.NewBuffer(make([]byte, 0, 80)) - err = mocks.MockBlock.Header.Serialize(buf) - Expect(err).ToNot(HaveOccurred()) - headerBytes := buf.Bytes() - c, _ := ipld.RawdataToCid(ipld.MBitcoinHeader, headerBytes, multihash.DBL_SHA2_256) - header := new(btc.HeaderModel) - err = db.Get(header, pgStr, mocks.MockHeaderMetaData.BlockNumber) - Expect(err).ToNot(HaveOccurred()) - Expect(header.CID).To(Equal(c.String())) - Expect(header.BlockNumber).To(Equal(mocks.MockHeaderMetaData.BlockNumber)) - Expect(header.Bits).To(Equal(mocks.MockHeaderMetaData.Bits)) - Expect(header.Timestamp).To(Equal(mocks.MockHeaderMetaData.Timestamp)) - Expect(header.BlockHash).To(Equal(mocks.MockHeaderMetaData.BlockHash)) - Expect(header.ParentHash).To(Equal(mocks.MockHeaderMetaData.ParentHash)) - dc, err := cid.Decode(header.CID) - Expect(err).ToNot(HaveOccurred()) - mhKey := dshelp.MultihashToDsKey(dc.Hash()) - prefixedKey := blockstore.BlockPrefix.String() + mhKey.String() - var data []byte - err = db.Get(&data, ipfsPgGet, prefixedKey) - Expect(err).ToNot(HaveOccurred()) - Expect(data).To(Equal(headerBytes)) - - // check that txs were properly indexed - trxs := make([]btc.TxModel, 0) - pgStr = `SELECT transaction_cids.id, transaction_cids.header_id, transaction_cids.index, - transaction_cids.tx_hash, transaction_cids.cid, transaction_cids.segwit, transaction_cids.witness_hash - FROM btc.transaction_cids INNER JOIN btc.header_cids ON (transaction_cids.header_id = header_cids.id) - WHERE header_cids.block_number = $1` - err = db.Select(&trxs, pgStr, mocks.MockHeaderMetaData.BlockNumber) - Expect(err).ToNot(HaveOccurred()) - Expect(len(trxs)).To(Equal(3)) - txData := make([][]byte, len(mocks.MockTransactions)) - txCIDs := make([]string, len(mocks.MockTransactions)) - for i, m := range mocks.MockTransactions { - buf := bytes.NewBuffer(make([]byte, 0)) - err = m.MsgTx().Serialize(buf) - Expect(err).ToNot(HaveOccurred()) - tx := buf.Bytes() - 
txData[i] = tx - c, _ := ipld.RawdataToCid(ipld.MBitcoinTx, tx, multihash.DBL_SHA2_256) - txCIDs[i] = c.String() - } - for _, tx := range trxs { - Expect(tx.SegWit).To(Equal(false)) - Expect(tx.HeaderID).To(Equal(header.ID)) - Expect(tx.WitnessHash).To(Equal("")) - Expect(tx.CID).To(Equal(txCIDs[tx.Index])) - Expect(tx.TxHash).To(Equal(mocks.MockBlock.Transactions[tx.Index].TxHash().String())) - dc, err := cid.Decode(tx.CID) - Expect(err).ToNot(HaveOccurred()) - mhKey := dshelp.MultihashToDsKey(dc.Hash()) - prefixedKey := blockstore.BlockPrefix.String() + mhKey.String() - var data []byte - err = db.Get(&data, ipfsPgGet, prefixedKey) - Expect(err).ToNot(HaveOccurred()) - Expect(data).To(Equal(txData[tx.Index])) - } - }) - }) -}) diff --git a/pkg/btc/streamer.go b/pkg/btc/streamer.go deleted file mode 100644 index f48a8cff..00000000 --- a/pkg/btc/streamer.go +++ /dev/null @@ -1,86 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package btc - -import ( - "github.com/btcsuite/btcd/rpcclient" - "github.com/btcsuite/btcd/wire" - "github.com/btcsuite/btcutil" - "github.com/sirupsen/logrus" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -const ( - PayloadChanBufferSize = 20000 // the max eth sub buffer size -) - -// PayloadStreamer satisfies the PayloadStreamer interface for bitcoin -type PayloadStreamer struct { - Config *rpcclient.ConnConfig -} - -// NewPayloadStreamer creates a pointer to a new PayloadStreamer which satisfies the PayloadStreamer interface for bitcoin -func NewPayloadStreamer(clientConfig *rpcclient.ConnConfig) *PayloadStreamer { - return &PayloadStreamer{ - Config: clientConfig, - } -} - -// Stream is the main loop for subscribing to data from the btc block notifications -// Satisfies the shared.PayloadStreamer interface -func (ps *PayloadStreamer) Stream(payloadChan chan shared.RawChainData) (shared.ClientSubscription, error) { - logrus.Info("streaming block payloads from btc") - blockNotificationHandler := rpcclient.NotificationHandlers{ - // Notification handler for block connections, forwards new block data to the payloadChan - OnFilteredBlockConnected: func(height int32, header *wire.BlockHeader, txs []*btcutil.Tx) { - payloadChan <- BlockPayload{ - BlockHeight: int64(height), - Header: header, - Txs: txs, - } - }, - } - // Create a new client, and connect to btc ws server - client, err := rpcclient.New(ps.Config, &blockNotificationHandler) - if err != nil { - return nil, err - } - // Register for block connect notifications. 
- if err := client.NotifyBlocks(); err != nil { - return nil, err - } - client.WaitForShutdown() - return &ClientSubscription{client: client}, nil -} - -// ClientSubscription is a wrapper around the underlying btcd rpc client -// to fit the shared.ClientSubscription interface -type ClientSubscription struct { - client *rpcclient.Client -} - -// Unsubscribe satisfies the rpc.Subscription interface -func (bcs *ClientSubscription) Unsubscribe() { - bcs.client.Shutdown() -} - -// Err() satisfies the rpc.Subscription interface with a dummy err channel -func (bcs *ClientSubscription) Err() <-chan error { - errChan := make(chan error) - return errChan -} diff --git a/pkg/btc/subscription_config.go b/pkg/btc/subscription_config.go deleted file mode 100644 index 49c18d94..00000000 --- a/pkg/btc/subscription_config.go +++ /dev/null @@ -1,115 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package btc - -import ( - "errors" - "math/big" - - "github.com/spf13/viper" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -// SubscriptionSettings config is used by a subscriber to specify what bitcoin data to stream from the watcher -type SubscriptionSettings struct { - BackFill bool - BackFillOnly bool - Start *big.Int - End *big.Int // set to 0 or a negative value to have no ending block - HeaderFilter HeaderFilter - TxFilter TxFilter -} - -// HeaderFilter contains filter settings for headers -type HeaderFilter struct { - Off bool -} - -// TxFilter contains filter settings for txs -type TxFilter struct { - Off bool - Segwit bool // allow filtering for segwit trxs - WitnessHashes []string // allow filtering for specific witness hashes - Indexes []int64 // allow filtering for specific transaction indexes (e.g. 
0 for coinbase transactions) - PkScriptClasses []uint8 // allow filtering for txs that have at least one tx output with the specified pkscript class - MultiSig bool // allow filtering for txs that have at least one tx output that requires more than one signature - Addresses []string // allow filtering for txs that have at least one tx output with at least one of the provided addresses -} - -// Init is used to initialize a EthSubscription struct with env variables -func NewBtcSubscriptionConfig() (*SubscriptionSettings, error) { - sc := new(SubscriptionSettings) - // Below default to false, which means we do not backfill by default - sc.BackFill = viper.GetBool("watcher.btcSubscription.historicalData") - sc.BackFillOnly = viper.GetBool("watcher.btcSubscription.historicalDataOnly") - // Below default to 0 - // 0 start means we start at the beginning and 0 end means we continue indefinitely - sc.Start = big.NewInt(viper.GetInt64("watcher.btcSubscription.startingBlock")) - sc.End = big.NewInt(viper.GetInt64("watcher.btcSubscription.endingBlock")) - // Below default to false, which means we get all headers by default - sc.HeaderFilter = HeaderFilter{ - Off: viper.GetBool("watcher.btcSubscription.headerFilter.off"), - } - // Below defaults to false and two slices of length 0 - // Which means we get all transactions by default - pksc := viper.Get("watcher.btcSubscription.txFilter.pkScriptClass") - pkScriptClasses, ok := pksc.([]uint8) - if !ok { - return nil, errors.New("watcher.btcSubscription.txFilter.pkScriptClass needs to be an array of uint8s") - } - is := viper.Get("watcher.btcSubscription.txFilter.indexes") - indexes, ok := is.([]int64) - if !ok { - return nil, errors.New("watcher.btcSubscription.txFilter.indexes needs to be an array of int64s") - } - sc.TxFilter = TxFilter{ - Off: viper.GetBool("watcher.btcSubscription.txFilter.off"), - Segwit: viper.GetBool("watcher.btcSubscription.txFilter.segwit"), - WitnessHashes: viper.GetStringSlice("watcher.btcSubscription.txFilter.witnessHashes"), - PkScriptClasses: pkScriptClasses, - Indexes: indexes, - MultiSig: viper.GetBool("watcher.btcSubscription.txFilter.multiSig"), - Addresses: viper.GetStringSlice("watcher.btcSubscription.txFilter.addresses"), - } - return sc, nil -} - -// StartingBlock satisfies the SubscriptionSettings() interface -func (sc *SubscriptionSettings) StartingBlock() *big.Int { - return sc.Start -} - -// EndingBlock satisfies the SubscriptionSettings() interface -func (sc *SubscriptionSettings) EndingBlock() *big.Int { - return sc.End -} - -// HistoricalData satisfies the SubscriptionSettings() interface -func (sc *SubscriptionSettings) HistoricalData() bool { - return sc.BackFill -} - -// HistoricalDataOnly satisfies the SubscriptionSettings() interface -func (sc *SubscriptionSettings) HistoricalDataOnly() bool { - return sc.BackFillOnly -} - -// ChainType satisfies the SubscriptionSettings() interface -func (sc *SubscriptionSettings) ChainType() shared.ChainType { - return shared.Bitcoin -} diff --git a/pkg/btc/test_helpers.go b/pkg/btc/test_helpers.go deleted file mode 100644 index 09a36093..00000000 --- a/pkg/btc/test_helpers.go +++ /dev/null @@ -1,43 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package btc - -import ( - . "github.com/onsi/gomega" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" -) - -// TearDownDB is used to tear down the watcher dbs after tests -func TearDownDB(db *postgres.DB) { - tx, err := db.Beginx() - Expect(err).NotTo(HaveOccurred()) - - _, err = tx.Exec(`DELETE FROM btc.header_cids`) - Expect(err).NotTo(HaveOccurred()) - _, err = tx.Exec(`DELETE FROM btc.transaction_cids`) - Expect(err).NotTo(HaveOccurred()) - _, err = tx.Exec(`DELETE FROM btc.tx_inputs`) - Expect(err).NotTo(HaveOccurred()) - _, err = tx.Exec(`DELETE FROM btc.tx_outputs`) - Expect(err).NotTo(HaveOccurred()) - _, err = tx.Exec(`DELETE FROM blocks`) - Expect(err).NotTo(HaveOccurred()) - - err = tx.Commit() - Expect(err).NotTo(HaveOccurred()) -} diff --git a/pkg/btc/types.go b/pkg/btc/types.go deleted file mode 100644 index 116984bf..00000000 --- a/pkg/btc/types.go +++ /dev/null @@ -1,76 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
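
TearDownDB above runs every DELETE inside one transaction, so a failed teardown never leaves a half-cleaned schema behind. A generic sketch of that pattern follows; the helper name, table list, and connection string are assumptions for illustration only.

package main

import (
	"log"

	"github.com/jmoiron/sqlx"
	_ "github.com/lib/pq"
)

// clearTables deletes all rows from the given tables in a single transaction,
// child tables first, mirroring the teardown helper above.
func clearTables(db *sqlx.DB, tables ...string) error {
	tx, err := db.Beginx()
	if err != nil {
		return err
	}
	for _, tbl := range tables {
		if _, err := tx.Exec(`DELETE FROM ` + tbl); err != nil {
			_ = tx.Rollback()
			return err
		}
	}
	return tx.Commit()
}

func main() {
	// Placeholder test database.
	db, err := sqlx.Connect("postgres", "postgresql://localhost:5432/vulcanize_testing?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	if err := clearTables(db, "btc.tx_inputs", "btc.tx_outputs", "btc.transaction_cids", "btc.header_cids", "public.blocks"); err != nil {
		log.Fatal(err)
	}
}
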
- -package btc - -import ( - "math/big" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs" - - "github.com/btcsuite/btcd/wire" - "github.com/btcsuite/btcutil" -) - -// BlockPayload packages the block and tx data received from block connection notifications -type BlockPayload struct { - BlockHeight int64 - Header *wire.BlockHeader - Txs []*btcutil.Tx -} - -// ConvertedPayload is a custom type which packages raw BTC data for publishing to IPFS and filtering to subscribers -// Returned by PayloadConverter -// Passed to IPLDPublisher and ResponseFilterer -type ConvertedPayload struct { - BlockPayload - TxMetaData []TxModelWithInsAndOuts -} - -// Height satisfies the StreamedIPLDs interface -func (cp ConvertedPayload) Height() int64 { - return cp.BlockPayload.BlockHeight -} - -// CIDPayload is a struct to hold all the CIDs and their associated meta data for indexing in Postgres -// Returned by IPLDPublisher -// Passed to CIDIndexer -type CIDPayload struct { - HeaderCID HeaderModel - TransactionCIDs []TxModelWithInsAndOuts -} - -// CIDWrapper is used to direct fetching of IPLDs from IPFS -// Returned by CIDRetriever -// Passed to IPLDFetcher -type CIDWrapper struct { - BlockNumber *big.Int - Header HeaderModel - Transactions []TxModel -} - -// IPLDs is used to package raw IPLD block data fetched from IPFS and returned by the server -// Returned by IPLDFetcher and ResponseFilterer -type IPLDs struct { - BlockNumber *big.Int - Header ipfs.BlockModel - Transactions []ipfs.BlockModel -} - -// Height satisfies the StreamedIPLDs interface -func (i IPLDs) Height() int64 { - return i.BlockNumber.Int64() -} diff --git a/pkg/config/config_suite_test.go b/pkg/config/config_suite_test.go deleted file mode 100644 index 97aa327c..00000000 --- a/pkg/config/config_suite_test.go +++ /dev/null @@ -1,29 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package config_test - -import ( - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "testing" -) - -func TestConfig(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Config Suite") -} diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go deleted file mode 100644 index 9fccc554..00000000 --- a/pkg/config/config_test.go +++ /dev/null @@ -1,48 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package config_test - -import ( - "bytes" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/spf13/viper" -) - -var vulcanizeConfig = []byte(` -[database] -name = "dbname" -hostname = "localhost" -port = 5432 -`) - -var _ = Describe("Loading the config", func() { - It("reads the private config using the environment", func() { - viper.SetConfigName("config") - viper.AddConfigPath("$GOPATH/src/github.com/vulcanize/ipfs-blockchain-watcher/environments/") - - testConfig := viper.New() - testConfig.SetConfigType("toml") - err := testConfig.ReadConfig(bytes.NewBuffer(vulcanizeConfig)) - Expect(err).To(BeNil()) - Expect(testConfig.Get("database.hostname")).To(Equal("localhost")) - Expect(testConfig.Get("database.name")).To(Equal("dbname")) - Expect(testConfig.Get("database.port")).To(Equal(int64(5432))) - }) - -}) diff --git a/pkg/config/database.go b/pkg/config/database.go deleted file mode 100644 index 5b868188..00000000 --- a/pkg/config/database.go +++ /dev/null @@ -1,78 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
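
The removed config test exercises viper's ability to parse an in-memory TOML document rather than a file on disk. The same pattern in isolation, with placeholder values:

package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/spf13/viper"
)

func main() {
	// Parse TOML from a byte buffer instead of reading a config file.
	tomlConfig := []byte(`
[database]
name = "dbname"
hostname = "localhost"
port = 5432
`)
	v := viper.New()
	v.SetConfigType("toml")
	if err := v.ReadConfig(bytes.NewBuffer(tomlConfig)); err != nil {
		log.Fatal(err)
	}
	fmt.Println(v.GetString("database.hostname"), v.GetInt("database.port"))
}
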
- -package config - -import ( - "fmt" - - "github.com/spf13/viper" -) - -// Env variables -const ( - DATABASE_NAME = "DATABASE_NAME" - DATABASE_HOSTNAME = "DATABASE_HOSTNAME" - DATABASE_PORT = "DATABASE_PORT" - DATABASE_USER = "DATABASE_USER" - DATABASE_PASSWORD = "DATABASE_PASSWORD" - DATABASE_MAX_IDLE_CONNECTIONS = "DATABASE_MAX_IDLE_CONNECTIONS" - DATABASE_MAX_OPEN_CONNECTIONS = "DATABASE_MAX_OPEN_CONNECTIONS" - DATABASE_MAX_CONN_LIFETIME = "DATABASE_MAX_CONN_LIFETIME" -) - -type Database struct { - Hostname string - Name string - User string - Password string - Port int - MaxIdle int - MaxOpen int - MaxLifetime int -} - -func DbConnectionString(dbConfig Database) string { - if len(dbConfig.User) > 0 && len(dbConfig.Password) > 0 { - return fmt.Sprintf("postgresql://%s:%s@%s:%d/%s?sslmode=disable", - dbConfig.User, dbConfig.Password, dbConfig.Hostname, dbConfig.Port, dbConfig.Name) - } - if len(dbConfig.User) > 0 && len(dbConfig.Password) == 0 { - return fmt.Sprintf("postgresql://%s@%s:%d/%s?sslmode=disable", - dbConfig.User, dbConfig.Hostname, dbConfig.Port, dbConfig.Name) - } - return fmt.Sprintf("postgresql://%s:%d/%s?sslmode=disable", dbConfig.Hostname, dbConfig.Port, dbConfig.Name) -} - -func (d *Database) Init() { - viper.BindEnv("database.name", DATABASE_NAME) - viper.BindEnv("database.hostname", DATABASE_HOSTNAME) - viper.BindEnv("database.port", DATABASE_PORT) - viper.BindEnv("database.user", DATABASE_USER) - viper.BindEnv("database.password", DATABASE_PASSWORD) - viper.BindEnv("database.maxIdle", DATABASE_MAX_IDLE_CONNECTIONS) - viper.BindEnv("database.maxOpen", DATABASE_MAX_OPEN_CONNECTIONS) - viper.BindEnv("database.maxLifetime", DATABASE_MAX_CONN_LIFETIME) - - d.Name = viper.GetString("database.name") - d.Hostname = viper.GetString("database.hostname") - d.Port = viper.GetInt("database.port") - d.User = viper.GetString("database.user") - d.Password = viper.GetString("database.password") - d.MaxIdle = viper.GetInt("database.maxIdle") - d.MaxOpen = viper.GetInt("database.maxOpen") - d.MaxLifetime = viper.GetInt("database.maxLifetime") -} diff --git a/pkg/eth/cleaner.go b/pkg/eth/cleaner.go deleted file mode 100644 index 3b0bdf82..00000000 --- a/pkg/eth/cleaner.go +++ /dev/null @@ -1,356 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
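
DbConnectionString above branches on whether a user and password are present and always appends sslmode=disable. The same three-way construction, condensed into a standalone sketch with placeholder credentials:

package main

import "fmt"

// connString mirrors the removed helper: credentials are optional, sslmode is
// always disabled. Keeping the branches explicit avoids emitting a dangling
// ':' or '@' when credentials are absent.
func connString(user, pass, host, name string, port int) string {
	switch {
	case user != "" && pass != "":
		return fmt.Sprintf("postgresql://%s:%s@%s:%d/%s?sslmode=disable", user, pass, host, port, name)
	case user != "":
		return fmt.Sprintf("postgresql://%s@%s:%d/%s?sslmode=disable", user, host, port, name)
	default:
		return fmt.Sprintf("postgresql://%s:%d/%s?sslmode=disable", host, port, name)
	}
}

func main() {
	fmt.Println(connString("vdbm", "password", "localhost", "vulcanize_public", 5432))
	// postgresql://vdbm:password@localhost:5432/vulcanize_public?sslmode=disable
}
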
- -package eth - -import ( - "fmt" - - "github.com/jmoiron/sqlx" - "github.com/sirupsen/logrus" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -// Cleaner satisfies the shared.Cleaner interface fo ethereum -type Cleaner struct { - db *postgres.DB -} - -// NewCleaner returns a new Cleaner struct that satisfies the shared.Cleaner interface -func NewCleaner(db *postgres.DB) *Cleaner { - return &Cleaner{ - db: db, - } -} - -// ResetValidation resets the validation level to 0 to enable revalidation -func (c *Cleaner) ResetValidation(rngs [][2]uint64) error { - tx, err := c.db.Beginx() - if err != nil { - return err - } - for _, rng := range rngs { - logrus.Infof("eth db cleaner resetting validation level to 0 for block range %d to %d", rng[0], rng[1]) - pgStr := `UPDATE eth.header_cids - SET times_validated = 0 - WHERE block_number BETWEEN $1 AND $2` - if _, err := tx.Exec(pgStr, rng[0], rng[1]); err != nil { - shared.Rollback(tx) - return err - } - } - return tx.Commit() -} - -// Clean removes the specified data from the db within the provided block range -func (c *Cleaner) Clean(rngs [][2]uint64, t shared.DataType) error { - tx, err := c.db.Beginx() - if err != nil { - return err - } - for _, rng := range rngs { - logrus.Infof("eth db cleaner cleaning up block range %d to %d", rng[0], rng[1]) - if err := c.clean(tx, rng, t); err != nil { - shared.Rollback(tx) - return err - } - } - if err := tx.Commit(); err != nil { - return err - } - logrus.Infof("eth db cleaner vacuum analyzing cleaned tables to free up space from deleted rows") - return c.vacuumAnalyze(t) -} - -func (c *Cleaner) clean(tx *sqlx.Tx, rng [2]uint64, t shared.DataType) error { - switch t { - case shared.Full, shared.Headers: - return c.cleanFull(tx, rng) - case shared.Uncles: - if err := c.cleanUncleIPLDs(tx, rng); err != nil { - return err - } - return c.cleanUncleMetaData(tx, rng) - case shared.Transactions: - if err := c.cleanReceiptIPLDs(tx, rng); err != nil { - return err - } - if err := c.cleanTransactionIPLDs(tx, rng); err != nil { - return err - } - return c.cleanTransactionMetaData(tx, rng) - case shared.Receipts: - if err := c.cleanReceiptIPLDs(tx, rng); err != nil { - return err - } - return c.cleanReceiptMetaData(tx, rng) - case shared.State: - if err := c.cleanStorageIPLDs(tx, rng); err != nil { - return err - } - if err := c.cleanStateIPLDs(tx, rng); err != nil { - return err - } - return c.cleanStateMetaData(tx, rng) - case shared.Storage: - if err := c.cleanStorageIPLDs(tx, rng); err != nil { - return err - } - return c.cleanStorageMetaData(tx, rng) - default: - return fmt.Errorf("eth cleaner unrecognized type: %s", t.String()) - } -} - -func (c *Cleaner) vacuumAnalyze(t shared.DataType) error { - switch t { - case shared.Full, shared.Headers: - return c.vacuumFull() - case shared.Uncles: - if err := c.vacuumUncles(); err != nil { - return err - } - case shared.Transactions: - if err := c.vacuumTxs(); err != nil { - return err - } - if err := c.vacuumRcts(); err != nil { - return err - } - case shared.Receipts: - if err := c.vacuumRcts(); err != nil { - return err - } - case shared.State: - if err := c.vacuumState(); err != nil { - return err - } - if err := c.vacuumAccounts(); err != nil { - return err - } - if err := c.vacuumStorage(); err != nil { - return err - } - case shared.Storage: - if err := c.vacuumStorage(); err != nil { - return err - } - default: - return fmt.Errorf("eth cleaner unrecognized type: %s", t.String()) - 
} - return c.vacuumIPLDs() -} - -func (c *Cleaner) vacuumFull() error { - if err := c.vacuumHeaders(); err != nil { - return err - } - if err := c.vacuumUncles(); err != nil { - return err - } - if err := c.vacuumTxs(); err != nil { - return err - } - if err := c.vacuumRcts(); err != nil { - return err - } - if err := c.vacuumState(); err != nil { - return err - } - if err := c.vacuumAccounts(); err != nil { - return err - } - return c.vacuumStorage() -} - -func (c *Cleaner) vacuumHeaders() error { - _, err := c.db.Exec(`VACUUM ANALYZE eth.header_cids`) - return err -} - -func (c *Cleaner) vacuumUncles() error { - _, err := c.db.Exec(`VACUUM ANALYZE eth.uncle_cids`) - return err -} - -func (c *Cleaner) vacuumTxs() error { - _, err := c.db.Exec(`VACUUM ANALYZE eth.transaction_cids`) - return err -} - -func (c *Cleaner) vacuumRcts() error { - _, err := c.db.Exec(`VACUUM ANALYZE eth.receipt_cids`) - return err -} - -func (c *Cleaner) vacuumState() error { - _, err := c.db.Exec(`VACUUM ANALYZE eth.state_cids`) - return err -} - -func (c *Cleaner) vacuumAccounts() error { - _, err := c.db.Exec(`VACUUM ANALYZE eth.state_accounts`) - return err -} - -func (c *Cleaner) vacuumStorage() error { - _, err := c.db.Exec(`VACUUM ANALYZE eth.storage_cids`) - return err -} - -func (c *Cleaner) vacuumIPLDs() error { - _, err := c.db.Exec(`VACUUM ANALYZE public.blocks`) - return err -} - -func (c *Cleaner) cleanFull(tx *sqlx.Tx, rng [2]uint64) error { - if err := c.cleanStorageIPLDs(tx, rng); err != nil { - return err - } - if err := c.cleanStateIPLDs(tx, rng); err != nil { - return err - } - if err := c.cleanReceiptIPLDs(tx, rng); err != nil { - return err - } - if err := c.cleanTransactionIPLDs(tx, rng); err != nil { - return err - } - if err := c.cleanUncleIPLDs(tx, rng); err != nil { - return err - } - if err := c.cleanHeaderIPLDs(tx, rng); err != nil { - return err - } - return c.cleanHeaderMetaData(tx, rng) -} - -func (c *Cleaner) cleanStorageIPLDs(tx *sqlx.Tx, rng [2]uint64) error { - pgStr := `DELETE FROM public.blocks A - USING eth.storage_cids B, eth.state_cids C, eth.header_cids D - WHERE A.key = B.mh_key - AND B.state_id = C.id - AND C.header_id = D.id - AND D.block_number BETWEEN $1 AND $2` - _, err := tx.Exec(pgStr, rng[0], rng[1]) - return err -} - -func (c *Cleaner) cleanStorageMetaData(tx *sqlx.Tx, rng [2]uint64) error { - pgStr := `DELETE FROM eth.storage_cids A - USING eth.state_cids B, eth.header_cids C - WHERE A.state_id = B.id - AND B.header_id = C.id - AND C.block_number BETWEEN $1 AND $2` - _, err := tx.Exec(pgStr, rng[0], rng[1]) - return err -} - -func (c *Cleaner) cleanStateIPLDs(tx *sqlx.Tx, rng [2]uint64) error { - pgStr := `DELETE FROM public.blocks A - USING eth.state_cids B, eth.header_cids C - WHERE A.key = B.mh_key - AND B.header_id = C.id - AND C.block_number BETWEEN $1 AND $2` - _, err := tx.Exec(pgStr, rng[0], rng[1]) - return err -} - -func (c *Cleaner) cleanStateMetaData(tx *sqlx.Tx, rng [2]uint64) error { - pgStr := `DELETE FROM eth.state_cids A - USING eth.header_cids B - WHERE A.header_id = B.id - AND B.block_number BETWEEN $1 AND $2` - _, err := tx.Exec(pgStr, rng[0], rng[1]) - return err -} - -func (c *Cleaner) cleanReceiptIPLDs(tx *sqlx.Tx, rng [2]uint64) error { - pgStr := `DELETE FROM public.blocks A - USING eth.receipt_cids B, eth.transaction_cids C, eth.header_cids D - WHERE A.key = B.mh_key - AND B.tx_id = C.id - AND C.header_id = D.id - AND D.block_number BETWEEN $1 AND $2` - _, err := tx.Exec(pgStr, rng[0], rng[1]) - return err -} - -func (c *Cleaner) 
cleanReceiptMetaData(tx *sqlx.Tx, rng [2]uint64) error { - pgStr := `DELETE FROM eth.receipt_cids A - USING eth.transaction_cids B, eth.header_cids C - WHERE A.tx_id = B.id - AND B.header_id = C.id - AND C.block_number BETWEEN $1 AND $2` - _, err := tx.Exec(pgStr, rng[0], rng[1]) - return err -} - -func (c *Cleaner) cleanTransactionIPLDs(tx *sqlx.Tx, rng [2]uint64) error { - pgStr := `DELETE FROM public.blocks A - USING eth.transaction_cids B, eth.header_cids C - WHERE A.key = B.mh_key - AND B.header_id = C.id - AND C.block_number BETWEEN $1 AND $2` - _, err := tx.Exec(pgStr, rng[0], rng[1]) - return err -} - -func (c *Cleaner) cleanTransactionMetaData(tx *sqlx.Tx, rng [2]uint64) error { - pgStr := `DELETE FROM eth.transaction_cids A - USING eth.header_cids B - WHERE A.header_id = B.id - AND B.block_number BETWEEN $1 AND $2` - _, err := tx.Exec(pgStr, rng[0], rng[1]) - return err -} - -func (c *Cleaner) cleanUncleIPLDs(tx *sqlx.Tx, rng [2]uint64) error { - pgStr := `DELETE FROM public.blocks A - USING eth.uncle_cids B, eth.header_cids C - WHERE A.key = B.mh_key - AND B.header_id = C.id - AND C.block_number BETWEEN $1 AND $2` - _, err := tx.Exec(pgStr, rng[0], rng[1]) - return err -} - -func (c *Cleaner) cleanUncleMetaData(tx *sqlx.Tx, rng [2]uint64) error { - pgStr := `DELETE FROM eth.uncle_cids A - USING eth.header_cids B - WHERE A.header_id = B.id - AND B.block_number BETWEEN $1 AND $2` - _, err := tx.Exec(pgStr, rng[0], rng[1]) - return err -} - -func (c *Cleaner) cleanHeaderIPLDs(tx *sqlx.Tx, rng [2]uint64) error { - pgStr := `DELETE FROM public.blocks A - USING eth.header_cids B - WHERE A.key = B.mh_key - AND B.block_number BETWEEN $1 AND $2` - _, err := tx.Exec(pgStr, rng[0], rng[1]) - return err -} - -func (c *Cleaner) cleanHeaderMetaData(tx *sqlx.Tx, rng [2]uint64) error { - pgStr := `DELETE FROM eth.header_cids - WHERE block_number BETWEEN $1 AND $2` - _, err := tx.Exec(pgStr, rng[0], rng[1]) - return err -} diff --git a/pkg/eth/cleaner_test.go b/pkg/eth/cleaner_test.go deleted file mode 100644 index 14900bbc..00000000 --- a/pkg/eth/cleaner_test.go +++ /dev/null @@ -1,698 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package eth_test - -import ( - "math/big" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/crypto" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -var ( - // Block 0 - // header variables - blockHash1 = crypto.Keccak256Hash([]byte{00, 02}) - blocKNumber1 = big.NewInt(0) - headerCID1 = shared.TestCID([]byte("mockHeader1CID")) - headerMhKey1 = shared.MultihashKeyFromCID(headerCID1) - parentHash = crypto.Keccak256Hash([]byte{00, 01}) - totalDifficulty = "50000000000000000000" - reward = "5000000000000000000" - headerModel = eth.HeaderModel{ - BlockHash: blockHash1.String(), - BlockNumber: blocKNumber1.String(), - CID: headerCID1.String(), - MhKey: headerMhKey1, - ParentHash: parentHash.String(), - TotalDifficulty: totalDifficulty, - Reward: reward, - } - - // tx variables - tx1CID = shared.TestCID([]byte("mockTx1CID")) - tx1MhKey = shared.MultihashKeyFromCID(tx1CID) - tx2CID = shared.TestCID([]byte("mockTx2CID")) - tx2MhKey = shared.MultihashKeyFromCID(tx2CID) - tx1Hash = crypto.Keccak256Hash([]byte{01, 01}) - tx2Hash = crypto.Keccak256Hash([]byte{01, 02}) - txSrc = common.HexToAddress("0x010a") - txDst = common.HexToAddress("0x020a") - txModels1 = []eth.TxModel{ - { - CID: tx1CID.String(), - MhKey: tx1MhKey, - TxHash: tx1Hash.String(), - Index: 0, - }, - { - CID: tx2CID.String(), - MhKey: tx2MhKey, - TxHash: tx2Hash.String(), - Index: 1, - }, - } - - // uncle variables - uncleCID = shared.TestCID([]byte("mockUncle1CID")) - uncleMhKey = shared.MultihashKeyFromCID(uncleCID) - uncleHash = crypto.Keccak256Hash([]byte{02, 02}) - uncleParentHash = crypto.Keccak256Hash([]byte{02, 01}) - uncleReward = "1000000000000000000" - uncleModels1 = []eth.UncleModel{ - { - CID: uncleCID.String(), - MhKey: uncleMhKey, - Reward: uncleReward, - BlockHash: uncleHash.String(), - ParentHash: uncleParentHash.String(), - }, - } - - // receipt variables - rct1CID = shared.TestCID([]byte("mockRct1CID")) - rct1MhKey = shared.MultihashKeyFromCID(rct1CID) - rct2CID = shared.TestCID([]byte("mockRct2CID")) - rct2MhKey = shared.MultihashKeyFromCID(rct2CID) - rct1Contract = common.Address{} - rct2Contract = common.HexToAddress("0x010c") - receiptModels1 = map[common.Hash]eth.ReceiptModel{ - tx1Hash: { - CID: rct1CID.String(), - MhKey: rct1MhKey, - ContractHash: crypto.Keccak256Hash(rct1Contract.Bytes()).String(), - }, - tx2Hash: { - CID: rct2CID.String(), - MhKey: rct2MhKey, - ContractHash: crypto.Keccak256Hash(rct2Contract.Bytes()).String(), - }, - } - - // state variables - state1CID1 = shared.TestCID([]byte("mockState1CID1")) - state1MhKey1 = shared.MultihashKeyFromCID(state1CID1) - state1Path = []byte{'\x01'} - state1Key = crypto.Keccak256Hash(txSrc.Bytes()) - state2CID1 = shared.TestCID([]byte("mockState2CID1")) - state2MhKey1 = shared.MultihashKeyFromCID(state2CID1) - state2Path = []byte{'\x02'} - state2Key = crypto.Keccak256Hash(txDst.Bytes()) - stateModels1 = []eth.StateNodeModel{ - { - CID: state1CID1.String(), - MhKey: state1MhKey1, - Path: state1Path, - NodeType: 2, - StateKey: state1Key.String(), - }, - { - CID: state2CID1.String(), - MhKey: state2MhKey1, - Path: state2Path, - NodeType: 2, - StateKey: state2Key.String(), - }, - } - - // storage variables - storageCID = shared.TestCID([]byte("mockStorageCID1")) - storageMhKey = shared.MultihashKeyFromCID(storageCID) - storagePath = []byte{'\x01'} - storageKey = crypto.Keccak256Hash(common.Hex2Bytes("0x0000000000000000000000000000000000000000000000000000000000000000")) - storageModels1 = 
map[string][]eth.StorageNodeModel{ - common.Bytes2Hex(state1Path): { - { - CID: storageCID.String(), - MhKey: storageMhKey, - StorageKey: storageKey.String(), - Path: storagePath, - NodeType: 2, - }, - }, - } - mockCIDPayload1 = ð.CIDPayload{ - HeaderCID: headerModel, - UncleCIDs: uncleModels1, - TransactionCIDs: txModels1, - ReceiptCIDs: receiptModels1, - StateNodeCIDs: stateModels1, - StorageNodeCIDs: storageModels1, - } - - // Block 1 - // header variables - blockHash2 = crypto.Keccak256Hash([]byte{00, 03}) - blocKNumber2 = big.NewInt(1) - headerCID2 = shared.TestCID([]byte("mockHeaderCID2")) - headerMhKey2 = shared.MultihashKeyFromCID(headerCID2) - headerModel2 = eth.HeaderModel{ - BlockHash: blockHash2.String(), - BlockNumber: blocKNumber2.String(), - CID: headerCID2.String(), - MhKey: headerMhKey2, - ParentHash: blockHash1.String(), - TotalDifficulty: totalDifficulty, - Reward: reward, - } - // tx variables - tx3CID = shared.TestCID([]byte("mockTx3CID")) - tx3MhKey = shared.MultihashKeyFromCID(tx3CID) - tx3Hash = crypto.Keccak256Hash([]byte{01, 03}) - txModels2 = []eth.TxModel{ - { - CID: tx3CID.String(), - MhKey: tx3MhKey, - TxHash: tx3Hash.String(), - Index: 0, - }, - } - // receipt variables - rct3CID = shared.TestCID([]byte("mockRct3CID")) - rct3MhKey = shared.MultihashKeyFromCID(rct3CID) - receiptModels2 = map[common.Hash]eth.ReceiptModel{ - tx3Hash: { - CID: rct3CID.String(), - MhKey: rct3MhKey, - ContractHash: crypto.Keccak256Hash(rct1Contract.Bytes()).String(), - }, - } - - // state variables - state1CID2 = shared.TestCID([]byte("mockState1CID2")) - state1MhKey2 = shared.MultihashKeyFromCID(state1CID2) - stateModels2 = []eth.StateNodeModel{ - { - CID: state1CID2.String(), - MhKey: state1MhKey2, - Path: state1Path, - NodeType: 2, - StateKey: state1Key.String(), - }, - } - mockCIDPayload2 = ð.CIDPayload{ - HeaderCID: headerModel2, - TransactionCIDs: txModels2, - ReceiptCIDs: receiptModels2, - StateNodeCIDs: stateModels2, - } - rngs = [][2]uint64{{0, 1}} - mhKeys = []string{ - headerMhKey1, - headerMhKey2, - uncleMhKey, - tx1MhKey, - tx2MhKey, - tx3MhKey, - rct1MhKey, - rct2MhKey, - rct3MhKey, - state1MhKey1, - state2MhKey1, - state1MhKey2, - storageMhKey, - } - mockData = []byte{'\x01'} -) - -var _ = Describe("Cleaner", func() { - var ( - db *postgres.DB - repo *eth.CIDIndexer - cleaner *eth.Cleaner - ) - BeforeEach(func() { - var err error - db, err = shared.SetupDB() - Expect(err).ToNot(HaveOccurred()) - repo = eth.NewCIDIndexer(db) - cleaner = eth.NewCleaner(db) - }) - Describe("Clean", func() { - BeforeEach(func() { - for _, key := range mhKeys { - _, err := db.Exec(`INSERT INTO public.blocks (key, data) VALUES ($1, $2)`, key, mockData) - Expect(err).ToNot(HaveOccurred()) - } - - err := repo.Index(mockCIDPayload1) - Expect(err).ToNot(HaveOccurred()) - err = repo.Index(mockCIDPayload2) - Expect(err).ToNot(HaveOccurred()) - - tx, err := db.Beginx() - Expect(err).ToNot(HaveOccurred()) - - var startingIPFSBlocksCount int - pgStr := `SELECT COUNT(*) FROM public.blocks` - err = tx.Get(&startingIPFSBlocksCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var startingStorageCount int - pgStr = `SELECT COUNT(*) FROM eth.storage_cids` - err = tx.Get(&startingStorageCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var startingStateCount int - pgStr = `SELECT COUNT(*) FROM eth.state_cids` - err = tx.Get(&startingStateCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var startingReceiptCount int - pgStr = `SELECT COUNT(*) FROM eth.receipt_cids` - err = 
tx.Get(&startingReceiptCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var startingTxCount int - pgStr = `SELECT COUNT(*) FROM eth.transaction_cids` - err = tx.Get(&startingTxCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var startingUncleCount int - pgStr = `SELECT COUNT(*) FROM eth.uncle_cids` - err = tx.Get(&startingUncleCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var startingHeaderCount int - pgStr = `SELECT COUNT(*) FROM eth.header_cids` - err = tx.Get(&startingHeaderCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - - err = tx.Commit() - Expect(err).ToNot(HaveOccurred()) - - Expect(startingIPFSBlocksCount).To(Equal(13)) - Expect(startingStorageCount).To(Equal(1)) - Expect(startingStateCount).To(Equal(3)) - Expect(startingReceiptCount).To(Equal(3)) - Expect(startingTxCount).To(Equal(3)) - Expect(startingUncleCount).To(Equal(1)) - Expect(startingHeaderCount).To(Equal(2)) - }) - AfterEach(func() { - eth.TearDownDB(db) - }) - It("Cleans everything", func() { - err := cleaner.Clean(rngs, shared.Full) - Expect(err).ToNot(HaveOccurred()) - - tx, err := db.Beginx() - Expect(err).ToNot(HaveOccurred()) - - pgStr := `SELECT COUNT(*) FROM eth.header_cids` - var headerCount int - err = tx.Get(&headerCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var uncleCount int - pgStr = `SELECT COUNT(*) FROM eth.uncle_cids` - err = tx.Get(&uncleCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var txCount int - pgStr = `SELECT COUNT(*) FROM eth.transaction_cids` - err = tx.Get(&txCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var rctCount int - pgStr = `SELECT COUNT(*) FROM eth.receipt_cids` - err = tx.Get(&rctCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var stateCount int - pgStr = `SELECT COUNT(*) FROM eth.state_cids` - err = tx.Get(&stateCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var storageCount int - pgStr = `SELECT COUNT(*) FROM eth.storage_cids` - err = tx.Get(&storageCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var blocksCount int - pgStr = `SELECT COUNT(*) FROM public.blocks` - err = tx.Get(&blocksCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - - err = tx.Commit() - Expect(err).ToNot(HaveOccurred()) - - Expect(headerCount).To(Equal(0)) - Expect(uncleCount).To(Equal(0)) - Expect(txCount).To(Equal(0)) - Expect(rctCount).To(Equal(0)) - Expect(stateCount).To(Equal(0)) - Expect(storageCount).To(Equal(0)) - Expect(blocksCount).To(Equal(0)) - }) - It("Cleans headers and all linked data (same as full)", func() { - err := cleaner.Clean(rngs, shared.Headers) - Expect(err).ToNot(HaveOccurred()) - - tx, err := db.Beginx() - Expect(err).ToNot(HaveOccurred()) - - var headerCount int - pgStr := `SELECT COUNT(*) FROM eth.header_cids` - err = tx.Get(&headerCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var uncleCount int - pgStr = `SELECT COUNT(*) FROM eth.uncle_cids` - err = tx.Get(&uncleCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var txCount int - pgStr = `SELECT COUNT(*) FROM eth.transaction_cids` - err = tx.Get(&txCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var rctCount int - pgStr = `SELECT COUNT(*) FROM eth.receipt_cids` - err = tx.Get(&rctCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var stateCount int - pgStr = `SELECT COUNT(*) FROM eth.state_cids` - err = tx.Get(&stateCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var storageCount int - pgStr = `SELECT COUNT(*) FROM eth.storage_cids` - err = tx.Get(&storageCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var blocksCount int - pgStr = `SELECT COUNT(*) FROM 
public.blocks` - err = tx.Get(&blocksCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - - err = tx.Commit() - Expect(err).ToNot(HaveOccurred()) - - Expect(headerCount).To(Equal(0)) - Expect(uncleCount).To(Equal(0)) - Expect(txCount).To(Equal(0)) - Expect(rctCount).To(Equal(0)) - Expect(stateCount).To(Equal(0)) - Expect(storageCount).To(Equal(0)) - Expect(blocksCount).To(Equal(0)) - }) - It("Cleans uncles", func() { - err := cleaner.Clean(rngs, shared.Uncles) - Expect(err).ToNot(HaveOccurred()) - - tx, err := db.Beginx() - Expect(err).ToNot(HaveOccurred()) - - var headerCount int - pgStr := `SELECT COUNT(*) FROM eth.header_cids` - err = tx.Get(&headerCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var uncleCount int - pgStr = `SELECT COUNT(*) FROM eth.uncle_cids` - err = tx.Get(&uncleCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var txCount int - pgStr = `SELECT COUNT(*) FROM eth.transaction_cids` - err = tx.Get(&txCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var rctCount int - pgStr = `SELECT COUNT(*) FROM eth.receipt_cids` - err = tx.Get(&rctCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var stateCount int - pgStr = `SELECT COUNT(*) FROM eth.state_cids` - err = tx.Get(&stateCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var storageCount int - pgStr = `SELECT COUNT(*) FROM eth.storage_cids` - err = tx.Get(&storageCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var blocksCount int - pgStr = `SELECT COUNT(*) FROM public.blocks` - err = tx.Get(&blocksCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - - err = tx.Commit() - Expect(err).ToNot(HaveOccurred()) - - Expect(headerCount).To(Equal(2)) - Expect(uncleCount).To(Equal(0)) - Expect(txCount).To(Equal(3)) - Expect(rctCount).To(Equal(3)) - Expect(stateCount).To(Equal(3)) - Expect(storageCount).To(Equal(1)) - Expect(blocksCount).To(Equal(12)) - }) - It("Cleans transactions and linked receipts", func() { - err := cleaner.Clean(rngs, shared.Transactions) - Expect(err).ToNot(HaveOccurred()) - - tx, err := db.Beginx() - Expect(err).ToNot(HaveOccurred()) - - var headerCount int - pgStr := `SELECT COUNT(*) FROM eth.header_cids` - err = tx.Get(&headerCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var uncleCount int - pgStr = `SELECT COUNT(*) FROM eth.uncle_cids` - err = tx.Get(&uncleCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var txCount int - pgStr = `SELECT COUNT(*) FROM eth.transaction_cids` - err = tx.Get(&txCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var rctCount int - pgStr = `SELECT COUNT(*) FROM eth.receipt_cids` - err = tx.Get(&rctCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var stateCount int - pgStr = `SELECT COUNT(*) FROM eth.state_cids` - err = tx.Get(&stateCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var storageCount int - pgStr = `SELECT COUNT(*) FROM eth.storage_cids` - err = tx.Get(&storageCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var blocksCount int - pgStr = `SELECT COUNT(*) FROM public.blocks` - err = tx.Get(&blocksCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - - err = tx.Commit() - Expect(err).ToNot(HaveOccurred()) - - Expect(headerCount).To(Equal(2)) - Expect(uncleCount).To(Equal(1)) - Expect(txCount).To(Equal(0)) - Expect(rctCount).To(Equal(0)) - Expect(stateCount).To(Equal(3)) - Expect(storageCount).To(Equal(1)) - Expect(blocksCount).To(Equal(7)) - }) - It("Cleans receipts", func() { - err := cleaner.Clean(rngs, shared.Receipts) - Expect(err).ToNot(HaveOccurred()) - - tx, err := db.Beginx() - Expect(err).ToNot(HaveOccurred()) - - var headerCount 
int - pgStr := `SELECT COUNT(*) FROM eth.header_cids` - err = tx.Get(&headerCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var uncleCount int - pgStr = `SELECT COUNT(*) FROM eth.uncle_cids` - err = tx.Get(&uncleCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var txCount int - pgStr = `SELECT COUNT(*) FROM eth.transaction_cids` - err = tx.Get(&txCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var rctCount int - pgStr = `SELECT COUNT(*) FROM eth.receipt_cids` - err = tx.Get(&rctCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var stateCount int - pgStr = `SELECT COUNT(*) FROM eth.state_cids` - err = tx.Get(&stateCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var storageCount int - pgStr = `SELECT COUNT(*) FROM eth.storage_cids` - err = tx.Get(&storageCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var blocksCount int - pgStr = `SELECT COUNT(*) FROM public.blocks` - err = tx.Get(&blocksCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - - err = tx.Commit() - Expect(err).ToNot(HaveOccurred()) - - Expect(headerCount).To(Equal(2)) - Expect(uncleCount).To(Equal(1)) - Expect(txCount).To(Equal(3)) - Expect(rctCount).To(Equal(0)) - Expect(stateCount).To(Equal(3)) - Expect(storageCount).To(Equal(1)) - Expect(blocksCount).To(Equal(10)) - }) - It("Cleans state and linked storage", func() { - err := cleaner.Clean(rngs, shared.State) - Expect(err).ToNot(HaveOccurred()) - - tx, err := db.Beginx() - Expect(err).ToNot(HaveOccurred()) - - var headerCount int - pgStr := `SELECT COUNT(*) FROM eth.header_cids` - err = tx.Get(&headerCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var uncleCount int - pgStr = `SELECT COUNT(*) FROM eth.uncle_cids` - err = tx.Get(&uncleCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var txCount int - pgStr = `SELECT COUNT(*) FROM eth.transaction_cids` - err = tx.Get(&txCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var rctCount int - pgStr = `SELECT COUNT(*) FROM eth.receipt_cids` - err = tx.Get(&rctCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var stateCount int - pgStr = `SELECT COUNT(*) FROM eth.state_cids` - err = tx.Get(&stateCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var storageCount int - pgStr = `SELECT COUNT(*) FROM eth.storage_cids` - err = tx.Get(&storageCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var blocksCount int - pgStr = `SELECT COUNT(*) FROM public.blocks` - err = tx.Get(&blocksCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - - err = tx.Commit() - Expect(err).ToNot(HaveOccurred()) - - Expect(headerCount).To(Equal(2)) - Expect(uncleCount).To(Equal(1)) - Expect(txCount).To(Equal(3)) - Expect(rctCount).To(Equal(3)) - Expect(stateCount).To(Equal(0)) - Expect(storageCount).To(Equal(0)) - Expect(blocksCount).To(Equal(9)) - }) - It("Cleans storage", func() { - err := cleaner.Clean(rngs, shared.Storage) - Expect(err).ToNot(HaveOccurred()) - - tx, err := db.Beginx() - Expect(err).ToNot(HaveOccurred()) - - var headerCount int - pgStr := `SELECT COUNT(*) FROM eth.header_cids` - err = tx.Get(&headerCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var uncleCount int - pgStr = `SELECT COUNT(*) FROM eth.uncle_cids` - err = tx.Get(&uncleCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var txCount int - pgStr = `SELECT COUNT(*) FROM eth.transaction_cids` - err = tx.Get(&txCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var rctCount int - pgStr = `SELECT COUNT(*) FROM eth.receipt_cids` - err = tx.Get(&rctCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var stateCount int - pgStr = `SELECT COUNT(*) FROM 
eth.state_cids` - err = tx.Get(&stateCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var storageCount int - pgStr = `SELECT COUNT(*) FROM eth.storage_cids` - err = tx.Get(&storageCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - var blocksCount int - pgStr = `SELECT COUNT(*) FROM public.blocks` - err = tx.Get(&blocksCount, pgStr) - Expect(err).ToNot(HaveOccurred()) - - err = tx.Commit() - Expect(err).ToNot(HaveOccurred()) - - Expect(headerCount).To(Equal(2)) - Expect(uncleCount).To(Equal(1)) - Expect(txCount).To(Equal(3)) - Expect(rctCount).To(Equal(3)) - Expect(stateCount).To(Equal(3)) - Expect(storageCount).To(Equal(0)) - Expect(blocksCount).To(Equal(12)) - }) - }) - - Describe("ResetValidation", func() { - BeforeEach(func() { - for _, key := range mhKeys { - _, err := db.Exec(`INSERT INTO public.blocks (key, data) VALUES ($1, $2)`, key, mockData) - Expect(err).ToNot(HaveOccurred()) - } - - err := repo.Index(mockCIDPayload1) - Expect(err).ToNot(HaveOccurred()) - err = repo.Index(mockCIDPayload2) - Expect(err).ToNot(HaveOccurred()) - - var validationTimes []int - pgStr := `SELECT times_validated FROM eth.header_cids` - err = db.Select(&validationTimes, pgStr) - Expect(err).ToNot(HaveOccurred()) - Expect(len(validationTimes)).To(Equal(2)) - Expect(validationTimes[0]).To(Equal(1)) - Expect(validationTimes[1]).To(Equal(1)) - - err = repo.Index(mockCIDPayload1) - Expect(err).ToNot(HaveOccurred()) - - validationTimes = []int{} - pgStr = `SELECT times_validated FROM eth.header_cids ORDER BY block_number` - err = db.Select(&validationTimes, pgStr) - Expect(err).ToNot(HaveOccurred()) - Expect(len(validationTimes)).To(Equal(2)) - Expect(validationTimes[0]).To(Equal(2)) - Expect(validationTimes[1]).To(Equal(1)) - }) - AfterEach(func() { - eth.TearDownDB(db) - }) - It("Resets the validation level", func() { - err := cleaner.ResetValidation(rngs) - Expect(err).ToNot(HaveOccurred()) - - var validationTimes []int - pgStr := `SELECT times_validated FROM eth.header_cids` - err = db.Select(&validationTimes, pgStr) - Expect(err).ToNot(HaveOccurred()) - Expect(len(validationTimes)).To(Equal(2)) - Expect(validationTimes[0]).To(Equal(0)) - Expect(validationTimes[1]).To(Equal(0)) - - err = repo.Index(mockCIDPayload2) - Expect(err).ToNot(HaveOccurred()) - - validationTimes = []int{} - pgStr = `SELECT times_validated FROM eth.header_cids ORDER BY block_number` - err = db.Select(&validationTimes, pgStr) - Expect(err).ToNot(HaveOccurred()) - Expect(len(validationTimes)).To(Equal(2)) - Expect(validationTimes[0]).To(Equal(0)) - Expect(validationTimes[1]).To(Equal(1)) - }) - }) -}) diff --git a/pkg/eth/converter.go b/pkg/eth/converter.go deleted file mode 100644 index eb534773..00000000 --- a/pkg/eth/converter.go +++ /dev/null @@ -1,155 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
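
The cleaner being removed leans on Postgres's DELETE ... USING form to walk the schema from public.blocks back to eth.header_cids by block range, which is what the counting tests above verify. A minimal sketch of issuing one such range delete, mirroring cleanHeaderIPLDs above (connection string and range are placeholders):

package main

import (
	"log"

	"github.com/jmoiron/sqlx"
	_ "github.com/lib/pq"
)

func main() {
	// Placeholder connection string.
	db, err := sqlx.Connect("postgres", "postgresql://localhost:5432/vulcanize_testing?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	// Remove header IPLD blocks for a block range by joining public.blocks
	// to eth.header_cids on the multihash key.
	res, err := db.Exec(`DELETE FROM public.blocks A
		USING eth.header_cids B
		WHERE A.key = B.mh_key
		AND B.block_number BETWEEN $1 AND $2`, 0, 1)
	if err != nil {
		log.Fatal(err)
	}
	n, _ := res.RowsAffected()
	log.Printf("deleted %d ipld blocks", n)
}

Ordering matters here, as in the removed cleanFull: the IPLD deletes have to run before the metadata deletes, because the join through eth.header_cids disappears once the metadata rows are gone.
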
- -package eth - -import ( - "fmt" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/crypto" - "github.com/ethereum/go-ethereum/params" - "github.com/ethereum/go-ethereum/rlp" - "github.com/ethereum/go-ethereum/statediff" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -// PayloadConverter satisfies the PayloadConverter interface for ethereum -type PayloadConverter struct { - chainConfig *params.ChainConfig -} - -// NewPayloadConverter creates a pointer to a new PayloadConverter which satisfies the PayloadConverter interface -func NewPayloadConverter(chainConfig *params.ChainConfig) *PayloadConverter { - return &PayloadConverter{ - chainConfig: chainConfig, - } -} - -// Convert method is used to convert an eth statediff.Payload to an IPLDPayload -// Satisfies the shared.PayloadConverter interface -func (pc *PayloadConverter) Convert(payload shared.RawChainData) (shared.ConvertedData, error) { - stateDiffPayload, ok := payload.(statediff.Payload) - if !ok { - return nil, fmt.Errorf("eth converter: expected payload type %T got %T", statediff.Payload{}, payload) - } - // Unpack block rlp to access fields - block := new(types.Block) - if err := rlp.DecodeBytes(stateDiffPayload.BlockRlp, block); err != nil { - return nil, err - } - trxLen := len(block.Transactions()) - convertedPayload := ConvertedPayload{ - TotalDifficulty: stateDiffPayload.TotalDifficulty, - Block: block, - TxMetaData: make([]TxModel, 0, trxLen), - Receipts: make(types.Receipts, 0, trxLen), - ReceiptMetaData: make([]ReceiptModel, 0, trxLen), - StateNodes: make([]TrieNode, 0), - StorageNodes: make(map[string][]TrieNode), - } - signer := types.MakeSigner(pc.chainConfig, block.Number()) - transactions := block.Transactions() - for i, trx := range transactions { - // Extract to and from data from the transactions for indexing - from, err := types.Sender(signer, trx) - if err != nil { - return nil, err - } - - txMeta := TxModel{ - Dst: shared.HandleZeroAddrPointer(trx.To()), - Src: shared.HandleZeroAddr(from), - TxHash: trx.Hash().String(), - Index: int64(i), - Data: trx.Data(), - } - // txMeta will have same index as its corresponding trx in the convertedPayload.BlockBody - convertedPayload.TxMetaData = append(convertedPayload.TxMetaData, txMeta) - } - - // Decode receipts for this block - receipts := make(types.Receipts, 0) - if err := rlp.DecodeBytes(stateDiffPayload.ReceiptsRlp, &receipts); err != nil { - return nil, err - } - // Derive any missing fields - if err := receipts.DeriveFields(pc.chainConfig, block.Hash(), block.NumberU64(), block.Transactions()); err != nil { - return nil, err - } - for i, receipt := range receipts { - // Extract topic and contract data from the receipt for indexing - topicSets := make([][]string, 4) - mappedContracts := make(map[string]bool) // use map to avoid duplicate addresses - for _, log := range receipt.Logs { - for i, topic := range log.Topics { - topicSets[i] = append(topicSets[i], topic.Hex()) - } - mappedContracts[log.Address.String()] = true - } - // These are the contracts seen in the logs - logContracts := make([]string, 0, len(mappedContracts)) - for addr := range mappedContracts { - logContracts = append(logContracts, addr) - } - // This is the contract address if this receipt is for a contract creation tx - contract := shared.HandleZeroAddr(receipt.ContractAddress) - var contractHash string - if contract != "" { - convertedPayload.TxMetaData[i].Deployment = true - contractHash =
crypto.Keccak256Hash(common.HexToAddress(contract).Bytes()).String() - } - rctMeta := ReceiptModel{ - Topic0s: topicSets[0], - Topic1s: topicSets[1], - Topic2s: topicSets[2], - Topic3s: topicSets[3], - Contract: contract, - ContractHash: contractHash, - LogContracts: logContracts, - } - // receipt and rctMeta will have same indexes - convertedPayload.Receipts = append(convertedPayload.Receipts, receipt) - convertedPayload.ReceiptMetaData = append(convertedPayload.ReceiptMetaData, rctMeta) - } - - // Unpack state diff rlp to access fields - stateDiff := new(statediff.StateObject) - if err := rlp.DecodeBytes(stateDiffPayload.StateObjectRlp, stateDiff); err != nil { - return nil, err - } - for _, stateNode := range stateDiff.Nodes { - statePath := common.Bytes2Hex(stateNode.Path) - convertedPayload.StateNodes = append(convertedPayload.StateNodes, TrieNode{ - Path: stateNode.Path, - Value: stateNode.NodeValue, - Type: stateNode.NodeType, - LeafKey: common.BytesToHash(stateNode.LeafKey), - }) - for _, storageNode := range stateNode.StorageNodes { - convertedPayload.StorageNodes[statePath] = append(convertedPayload.StorageNodes[statePath], TrieNode{ - Path: storageNode.Path, - Value: storageNode.NodeValue, - Type: storageNode.NodeType, - LeafKey: common.BytesToHash(storageNode.LeafKey), - }) - } - } - - return convertedPayload, nil -} diff --git a/pkg/eth/converter_test.go b/pkg/eth/converter_test.go deleted file mode 100644 index 8f681757..00000000 --- a/pkg/eth/converter_test.go +++ /dev/null @@ -1,54 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package eth_test - -import ( - "github.com/ethereum/go-ethereum/params" - "github.com/ethereum/go-ethereum/rlp" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth/mocks" -) - -var _ = Describe("Converter", func() { - Describe("Convert", func() { - It("Converts mock statediff.Payloads into the expected IPLDPayloads", func() { - converter := eth.NewPayloadConverter(params.MainnetChainConfig) - payload, err := converter.Convert(mocks.MockStateDiffPayload) - Expect(err).ToNot(HaveOccurred()) - convertedPayload, ok := payload.(eth.ConvertedPayload) - Expect(ok).To(BeTrue()) - Expect(convertedPayload.Block.Number().String()).To(Equal(mocks.BlockNumber.String())) - Expect(convertedPayload.Block.Hash().String()).To(Equal(mocks.MockBlock.Hash().String())) - Expect(convertedPayload.StateNodes).To(Equal(mocks.MockStateNodes)) - Expect(convertedPayload.StorageNodes).To(Equal(mocks.MockStorageNodes)) - Expect(convertedPayload.TotalDifficulty.Int64()).To(Equal(mocks.MockStateDiffPayload.TotalDifficulty.Int64())) - gotBody, err := rlp.EncodeToBytes(convertedPayload.Block.Body()) - Expect(err).ToNot(HaveOccurred()) - expectedBody, err := rlp.EncodeToBytes(mocks.MockBlock.Body()) - Expect(err).ToNot(HaveOccurred()) - Expect(gotBody).To(Equal(expectedBody)) - gotHeader, err := rlp.EncodeToBytes(convertedPayload.Block.Header()) - Expect(err).ToNot(HaveOccurred()) - Expect(gotHeader).To(Equal(mocks.MockHeaderRlp)) - Expect(convertedPayload.TxMetaData).To(Equal(mocks.MockTrxMeta)) - Expect(convertedPayload.ReceiptMetaData).To(Equal(mocks.MockRctMeta)) - }) - }) -}) diff --git a/pkg/eth/indexer.go b/pkg/eth/indexer.go deleted file mode 100644 index f8170381..00000000 --- a/pkg/eth/indexer.go +++ /dev/null @@ -1,206 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package eth - -import ( - "fmt" - - "github.com/ethereum/go-ethereum/common" - "github.com/jmoiron/sqlx" - log "github.com/sirupsen/logrus" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -var ( - nullHash = common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000000") -) - -// Indexer satisfies the Indexer interface for ethereum -type CIDIndexer struct { - db *postgres.DB -} - -// NewCIDIndexer creates a new pointer to a Indexer which satisfies the CIDIndexer interface -func NewCIDIndexer(db *postgres.DB) *CIDIndexer { - return &CIDIndexer{ - db: db, - } -} - -// Index indexes a cidPayload in Postgres -func (in *CIDIndexer) Index(cids shared.CIDsForIndexing) error { - cidPayload, ok := cids.(*CIDPayload) - if !ok { - return fmt.Errorf("eth indexer expected cids type %T got %T", &CIDPayload{}, cids) - } - - // Begin new db tx - tx, err := in.db.Beginx() - if err != nil { - return err - } - defer func() { - if p := recover(); p != nil { - shared.Rollback(tx) - panic(p) - } else if err != nil { - shared.Rollback(tx) - } else { - err = tx.Commit() - } - }() - - headerID, err := in.indexHeaderCID(tx, cidPayload.HeaderCID) - if err != nil { - log.Error("eth indexer error when indexing header") - return err - } - for _, uncle := range cidPayload.UncleCIDs { - if err := in.indexUncleCID(tx, uncle, headerID); err != nil { - log.Error("eth indexer error when indexing uncle") - return err - } - } - if err := in.indexTransactionAndReceiptCIDs(tx, cidPayload, headerID); err != nil { - log.Error("eth indexer error when indexing transactions and receipts") - return err - } - err = in.indexStateAndStorageCIDs(tx, cidPayload, headerID) - if err != nil { - log.Error("eth indexer error when indexing state and storage nodes") - } - return err -} - -func (in *CIDIndexer) indexHeaderCID(tx *sqlx.Tx, header HeaderModel) (int64, error) { - var headerID int64 - err := tx.QueryRowx(`INSERT INTO eth.header_cids (block_number, block_hash, parent_hash, cid, td, node_id, reward, state_root, tx_root, receipt_root, uncle_root, bloom, timestamp, mh_key, times_validated) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15) - ON CONFLICT (block_number, block_hash) DO UPDATE SET (parent_hash, cid, td, node_id, reward, state_root, tx_root, receipt_root, uncle_root, bloom, timestamp, mh_key, times_validated) = ($3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, eth.header_cids.times_validated + 1) - RETURNING id`, - header.BlockNumber, header.BlockHash, header.ParentHash, header.CID, header.TotalDifficulty, in.db.NodeID, header.Reward, header.StateRoot, header.TxRoot, - header.RctRoot, header.UncleRoot, header.Bloom, header.Timestamp, header.MhKey, 1).Scan(&headerID) - return headerID, err -} - -func (in *CIDIndexer) indexUncleCID(tx *sqlx.Tx, uncle UncleModel, headerID int64) error { - _, err := tx.Exec(`INSERT INTO eth.uncle_cids (block_hash, header_id, parent_hash, cid, reward, mh_key) VALUES ($1, $2, $3, $4, $5, $6) - ON CONFLICT (header_id, block_hash) DO UPDATE SET (parent_hash, cid, reward, mh_key) = ($3, $4, $5, $6)`, - uncle.BlockHash, headerID, uncle.ParentHash, uncle.CID, uncle.Reward, uncle.MhKey) - return err -} - -func (in *CIDIndexer) indexTransactionAndReceiptCIDs(tx *sqlx.Tx, payload *CIDPayload, headerID int64) error { - for _, trxCidMeta := range payload.TransactionCIDs { - var txID int64 - err := tx.QueryRowx(`INSERT INTO eth.transaction_cids (header_id, tx_hash, cid, dst, 
src, index, mh_key, tx_data, deployment) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) - ON CONFLICT (header_id, tx_hash) DO UPDATE SET (cid, dst, src, index, mh_key, tx_data, deployment) = ($3, $4, $5, $6, $7, $8, $9) - RETURNING id`, - headerID, trxCidMeta.TxHash, trxCidMeta.CID, trxCidMeta.Dst, trxCidMeta.Src, trxCidMeta.Index, trxCidMeta.MhKey, trxCidMeta.Data, trxCidMeta.Deployment).Scan(&txID) - if err != nil { - return err - } - receiptCidMeta, ok := payload.ReceiptCIDs[common.HexToHash(trxCidMeta.TxHash)] - if ok { - if err := in.indexReceiptCID(tx, receiptCidMeta, txID); err != nil { - return err - } - } - } - return nil -} - -func (in *CIDIndexer) indexTransactionCID(tx *sqlx.Tx, transaction TxModel, headerID int64) (int64, error) { - var txID int64 - err := tx.QueryRowx(`INSERT INTO eth.transaction_cids (header_id, tx_hash, cid, dst, src, index, mh_key, tx_data, deployment) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) - ON CONFLICT (header_id, tx_hash) DO UPDATE SET (cid, dst, src, index, mh_key, tx_data, deployment) = ($3, $4, $5, $6, $7, $8, $9) - RETURNING id`, - headerID, transaction.TxHash, transaction.CID, transaction.Dst, transaction.Src, transaction.Index, transaction.MhKey, transaction.Data, transaction.Deployment).Scan(&txID) - return txID, err -} - -func (in *CIDIndexer) indexReceiptCID(tx *sqlx.Tx, cidMeta ReceiptModel, txID int64) error { - _, err := tx.Exec(`INSERT INTO eth.receipt_cids (tx_id, cid, contract, contract_hash, topic0s, topic1s, topic2s, topic3s, log_contracts, mh_key) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) - ON CONFLICT (tx_id) DO UPDATE SET (cid, contract, contract_hash, topic0s, topic1s, topic2s, topic3s, log_contracts, mh_key) = ($2, $3, $4, $5, $6, $7, $8, $9, $10)`, - txID, cidMeta.CID, cidMeta.Contract, cidMeta.ContractHash, cidMeta.Topic0s, cidMeta.Topic1s, cidMeta.Topic2s, cidMeta.Topic3s, cidMeta.LogContracts, cidMeta.MhKey) - return err -} - -func (in *CIDIndexer) indexStateAndStorageCIDs(tx *sqlx.Tx, payload *CIDPayload, headerID int64) error { - for _, stateCID := range payload.StateNodeCIDs { - var stateID int64 - var stateKey string - if stateCID.StateKey != nullHash.String() { - stateKey = stateCID.StateKey - } - err := tx.QueryRowx(`INSERT INTO eth.state_cids (header_id, state_leaf_key, cid, state_path, node_type, diff, mh_key) VALUES ($1, $2, $3, $4, $5, $6, $7) - ON CONFLICT (header_id, state_path) DO UPDATE SET (state_leaf_key, cid, node_type, diff, mh_key) = ($2, $3, $5, $6, $7) - RETURNING id`, - headerID, stateKey, stateCID.CID, stateCID.Path, stateCID.NodeType, true, stateCID.MhKey).Scan(&stateID) - if err != nil { - return err - } - // If we have a state leaf node, index the associated account and storage nodes - if stateCID.NodeType == 2 { - statePath := common.Bytes2Hex(stateCID.Path) - for _, storageCID := range payload.StorageNodeCIDs[statePath] { - if err := in.indexStorageCID(tx, storageCID, stateID); err != nil { - return err - } - } - if stateAccount, ok := payload.StateAccounts[statePath]; ok { - if err := in.indexStateAccount(tx, stateAccount, stateID); err != nil { - return err - } - } - } - } - return nil -} - -func (in *CIDIndexer) indexStateCID(tx *sqlx.Tx, stateNode StateNodeModel, headerID int64) (int64, error) { - var stateID int64 - var stateKey string - if stateNode.StateKey != nullHash.String() { - stateKey = stateNode.StateKey - } - err := tx.QueryRowx(`INSERT INTO eth.state_cids (header_id, state_leaf_key, cid, state_path, node_type, diff, mh_key) VALUES ($1, $2, $3, $4, $5, $6, $7) - ON CONFLICT 
(header_id, state_path) DO UPDATE SET (state_leaf_key, cid, node_type, diff, mh_key) = ($2, $3, $5, $6, $7) - RETURNING id`, - headerID, stateKey, stateNode.CID, stateNode.Path, stateNode.NodeType, true, stateNode.MhKey).Scan(&stateID) - return stateID, err -} - -func (in *CIDIndexer) indexStateAccount(tx *sqlx.Tx, stateAccount StateAccountModel, stateID int64) error { - _, err := tx.Exec(`INSERT INTO eth.state_accounts (state_id, balance, nonce, code_hash, storage_root) VALUES ($1, $2, $3, $4, $5) - ON CONFLICT (state_id) DO UPDATE SET (balance, nonce, code_hash, storage_root) = ($2, $3, $4, $5)`, - stateID, stateAccount.Balance, stateAccount.Nonce, stateAccount.CodeHash, stateAccount.StorageRoot) - return err -} - -func (in *CIDIndexer) indexStorageCID(tx *sqlx.Tx, storageCID StorageNodeModel, stateID int64) error { - var storageKey string - if storageCID.StorageKey != nullHash.String() { - storageKey = storageCID.StorageKey - } - _, err := tx.Exec(`INSERT INTO eth.storage_cids (state_id, storage_leaf_key, cid, storage_path, node_type, diff, mh_key) VALUES ($1, $2, $3, $4, $5, $6, $7) - ON CONFLICT (state_id, storage_path) DO UPDATE SET (storage_leaf_key, cid, node_type, diff, mh_key) = ($2, $3, $5, $6, $7)`, - stateID, storageKey, storageCID.CID, storageCID.Path, storageCID.NodeType, true, storageCID.MhKey) - return err -} diff --git a/pkg/eth/indexer_test.go b/pkg/eth/indexer_test.go deleted file mode 100644 index a00c958a..00000000 --- a/pkg/eth/indexer_test.go +++ /dev/null @@ -1,137 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package eth_test - -import ( - "github.com/ethereum/go-ethereum/common" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth/mocks" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -var _ = Describe("Indexer", func() { - var ( - db *postgres.DB - err error - repo *eth.CIDIndexer - ) - BeforeEach(func() { - db, err = shared.SetupDB() - Expect(err).ToNot(HaveOccurred()) - repo = eth.NewCIDIndexer(db) - // need entries in the public.blocks with the mhkeys or the FK constraint will fail - shared.PublishMockIPLD(db, mocks.HeaderMhKey, mockData) - shared.PublishMockIPLD(db, mocks.Trx1MhKey, mockData) - shared.PublishMockIPLD(db, mocks.Trx2MhKey, mockData) - shared.PublishMockIPLD(db, mocks.Trx3MhKey, mockData) - shared.PublishMockIPLD(db, mocks.Rct1MhKey, mockData) - shared.PublishMockIPLD(db, mocks.Rct2MhKey, mockData) - shared.PublishMockIPLD(db, mocks.Rct3MhKey, mockData) - shared.PublishMockIPLD(db, mocks.State1MhKey, mockData) - shared.PublishMockIPLD(db, mocks.State2MhKey, mockData) - shared.PublishMockIPLD(db, mocks.StorageMhKey, mockData) - }) - AfterEach(func() { - eth.TearDownDB(db) - }) - - Describe("Index", func() { - It("Indexes CIDs and related metadata into vulcanizedb", func() { - err = repo.Index(mocks.MockCIDPayload) - Expect(err).ToNot(HaveOccurred()) - pgStr := `SELECT cid, td, reward, id - FROM eth.header_cids - WHERE block_number = $1` - // check header was properly indexed - type res struct { - CID string - TD string - Reward string - ID int - } - headers := new(res) - err = db.QueryRowx(pgStr, 1).StructScan(headers) - Expect(err).ToNot(HaveOccurred()) - Expect(headers.CID).To(Equal(mocks.HeaderCID.String())) - Expect(headers.TD).To(Equal(mocks.MockBlock.Difficulty().String())) - Expect(headers.Reward).To(Equal("5000000000000000000")) - // check trxs were properly indexed - trxs := make([]string, 0) - pgStr = `SELECT transaction_cids.cid FROM eth.transaction_cids INNER JOIN eth.header_cids ON (transaction_cids.header_id = header_cids.id) - WHERE header_cids.block_number = $1` - err = db.Select(&trxs, pgStr, 1) - Expect(err).ToNot(HaveOccurred()) - Expect(len(trxs)).To(Equal(3)) - Expect(shared.ListContainsString(trxs, mocks.Trx1CID.String())).To(BeTrue()) - Expect(shared.ListContainsString(trxs, mocks.Trx2CID.String())).To(BeTrue()) - Expect(shared.ListContainsString(trxs, mocks.Trx3CID.String())).To(BeTrue()) - // check receipts were properly indexed - rcts := make([]string, 0) - pgStr = `SELECT receipt_cids.cid FROM eth.receipt_cids, eth.transaction_cids, eth.header_cids - WHERE receipt_cids.tx_id = transaction_cids.id - AND transaction_cids.header_id = header_cids.id - AND header_cids.block_number = $1` - err = db.Select(&rcts, pgStr, 1) - Expect(err).ToNot(HaveOccurred()) - Expect(len(rcts)).To(Equal(3)) - Expect(shared.ListContainsString(rcts, mocks.Rct1CID.String())).To(BeTrue()) - Expect(shared.ListContainsString(rcts, mocks.Rct2CID.String())).To(BeTrue()) - Expect(shared.ListContainsString(rcts, mocks.Rct3CID.String())).To(BeTrue()) - // check that state nodes were properly indexed - stateNodes := make([]eth.StateNodeModel, 0) - pgStr = `SELECT state_cids.cid, state_cids.state_leaf_key, state_cids.node_type, state_cids.state_path, state_cids.header_id - FROM eth.state_cids INNER JOIN eth.header_cids ON (state_cids.header_id = header_cids.id) - WHERE header_cids.block_number = $1` - err = db.Select(&stateNodes, pgStr, 1) - Expect(err).ToNot(HaveOccurred()) - 
Expect(len(stateNodes)).To(Equal(2)) - for _, stateNode := range stateNodes { - if stateNode.CID == mocks.State1CID.String() { - Expect(stateNode.NodeType).To(Equal(2)) - Expect(stateNode.StateKey).To(Equal(common.BytesToHash(mocks.ContractLeafKey).Hex())) - Expect(stateNode.Path).To(Equal([]byte{'\x06'})) - } - if stateNode.CID == mocks.State2CID.String() { - Expect(stateNode.NodeType).To(Equal(2)) - Expect(stateNode.StateKey).To(Equal(common.BytesToHash(mocks.AccountLeafKey).Hex())) - Expect(stateNode.Path).To(Equal([]byte{'\x0c'})) - } - } - // check that storage nodes were properly indexed - storageNodes := make([]eth.StorageNodeWithStateKeyModel, 0) - pgStr = `SELECT storage_cids.cid, state_cids.state_leaf_key, storage_cids.storage_leaf_key, storage_cids.node_type, storage_cids.storage_path - FROM eth.storage_cids, eth.state_cids, eth.header_cids - WHERE storage_cids.state_id = state_cids.id - AND state_cids.header_id = header_cids.id - AND header_cids.block_number = $1` - err = db.Select(&storageNodes, pgStr, 1) - Expect(err).ToNot(HaveOccurred()) - Expect(len(storageNodes)).To(Equal(1)) - Expect(storageNodes[0]).To(Equal(eth.StorageNodeWithStateKeyModel{ - CID: mocks.StorageCID.String(), - NodeType: 2, - StorageKey: common.BytesToHash(mocks.StorageLeafKey).Hex(), - StateKey: common.BytesToHash(mocks.ContractLeafKey).Hex(), - Path: []byte{}, - })) - }) - }) -}) diff --git a/pkg/eth/mocks/test_data.go b/pkg/eth/mocks/test_data.go deleted file mode 100644 index a8db6de0..00000000 --- a/pkg/eth/mocks/test_data.go +++ /dev/null @@ -1,582 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "crypto/ecdsa" - "crypto/elliptic" - "crypto/rand" - "math/big" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/state" - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/crypto" - "github.com/ethereum/go-ethereum/params" - "github.com/ethereum/go-ethereum/rlp" - "github.com/ethereum/go-ethereum/statediff" - "github.com/ethereum/go-ethereum/statediff/testhelpers" - "github.com/ipfs/go-block-format" - "github.com/multiformats/go-multihash" - log "github.com/sirupsen/logrus" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs/ipld" -) - -// Test variables -var ( - // block data - BlockNumber = big.NewInt(1) - MockHeader = types.Header{ - Time: 0, - Number: new(big.Int).Set(BlockNumber), - Root: common.HexToHash("0x0"), - TxHash: common.HexToHash("0x0"), - ReceiptHash: common.HexToHash("0x0"), - Difficulty: big.NewInt(5000000), - Extra: []byte{}, - } - MockTransactions, MockReceipts, SenderAddr = createTransactionsAndReceipts() - ReceiptsRlp, _ = rlp.EncodeToBytes(MockReceipts) - MockBlock = types.NewBlock(&MockHeader, MockTransactions, nil, MockReceipts) - MockBlockRlp, _ = rlp.EncodeToBytes(MockBlock) - MockHeaderRlp, _ = rlp.EncodeToBytes(MockBlock.Header()) - Address = common.HexToAddress("0xaE9BEa628c4Ce503DcFD7E305CaB4e29E7476592") - AnotherAddress = common.HexToAddress("0xaE9BEa628c4Ce503DcFD7E305CaB4e29E7476593") - ContractAddress = crypto.CreateAddress(SenderAddr, MockTransactions[2].Nonce()) - ContractHash = crypto.Keccak256Hash(ContractAddress.Bytes()).String() - MockContractByteCode = []byte{0, 1, 2, 3, 4, 5} - mockTopic11 = common.HexToHash("0x04") - mockTopic12 = common.HexToHash("0x06") - mockTopic21 = common.HexToHash("0x05") - mockTopic22 = common.HexToHash("0x07") - MockLog1 = &types.Log{ - Address: Address, - Topics: []common.Hash{mockTopic11, mockTopic12}, - Data: []byte{}, - } - MockLog2 = &types.Log{ - Address: AnotherAddress, - Topics: []common.Hash{mockTopic21, mockTopic22}, - Data: []byte{}, - } - HeaderCID, _ = ipld.RawdataToCid(ipld.MEthHeader, MockHeaderRlp, multihash.KECCAK_256) - HeaderMhKey = shared.MultihashKeyFromCID(HeaderCID) - Trx1CID, _ = ipld.RawdataToCid(ipld.MEthTx, MockTransactions.GetRlp(0), multihash.KECCAK_256) - Trx1MhKey = shared.MultihashKeyFromCID(Trx1CID) - Trx2CID, _ = ipld.RawdataToCid(ipld.MEthTx, MockTransactions.GetRlp(1), multihash.KECCAK_256) - Trx2MhKey = shared.MultihashKeyFromCID(Trx2CID) - Trx3CID, _ = ipld.RawdataToCid(ipld.MEthTx, MockTransactions.GetRlp(2), multihash.KECCAK_256) - Trx3MhKey = shared.MultihashKeyFromCID(Trx3CID) - Rct1CID, _ = ipld.RawdataToCid(ipld.MEthTxReceipt, MockReceipts.GetRlp(0), multihash.KECCAK_256) - Rct1MhKey = shared.MultihashKeyFromCID(Rct1CID) - Rct2CID, _ = ipld.RawdataToCid(ipld.MEthTxReceipt, MockReceipts.GetRlp(1), multihash.KECCAK_256) - Rct2MhKey = shared.MultihashKeyFromCID(Rct2CID) - Rct3CID, _ = ipld.RawdataToCid(ipld.MEthTxReceipt, MockReceipts.GetRlp(2), multihash.KECCAK_256) - Rct3MhKey = shared.MultihashKeyFromCID(Rct3CID) - State1CID, _ = ipld.RawdataToCid(ipld.MEthStateTrie, ContractLeafNode, multihash.KECCAK_256) - State1MhKey = shared.MultihashKeyFromCID(State1CID) - State2CID, _ = ipld.RawdataToCid(ipld.MEthStateTrie, AccountLeafNode, multihash.KECCAK_256) - State2MhKey = 
shared.MultihashKeyFromCID(State2CID) - StorageCID, _ = ipld.RawdataToCid(ipld.MEthStorageTrie, StorageLeafNode, multihash.KECCAK_256) - StorageMhKey = shared.MultihashKeyFromCID(StorageCID) - MockTrxMeta = []eth.TxModel{ - { - CID: "", // This is empty until we go to publish to ipfs - MhKey: "", - Src: SenderAddr.Hex(), - Dst: Address.String(), - Index: 0, - TxHash: MockTransactions[0].Hash().String(), - Data: []byte{}, - Deployment: false, - }, - { - CID: "", - MhKey: "", - Src: SenderAddr.Hex(), - Dst: AnotherAddress.String(), - Index: 1, - TxHash: MockTransactions[1].Hash().String(), - Data: []byte{}, - Deployment: false, - }, - { - CID: "", - MhKey: "", - Src: SenderAddr.Hex(), - Dst: "", - Index: 2, - TxHash: MockTransactions[2].Hash().String(), - Data: MockContractByteCode, - Deployment: true, - }, - } - MockTrxMetaPostPublsh = []eth.TxModel{ - { - CID: Trx1CID.String(), // This is empty until we go to publish to ipfs - MhKey: Trx1MhKey, - Src: SenderAddr.Hex(), - Dst: Address.String(), - Index: 0, - TxHash: MockTransactions[0].Hash().String(), - Data: []byte{}, - Deployment: false, - }, - { - CID: Trx2CID.String(), - MhKey: Trx2MhKey, - Src: SenderAddr.Hex(), - Dst: AnotherAddress.String(), - Index: 1, - TxHash: MockTransactions[1].Hash().String(), - Data: []byte{}, - Deployment: false, - }, - { - CID: Trx3CID.String(), - MhKey: Trx3MhKey, - Src: SenderAddr.Hex(), - Dst: "", - Index: 2, - TxHash: MockTransactions[2].Hash().String(), - Data: MockContractByteCode, - Deployment: true, - }, - } - MockRctMeta = []eth.ReceiptModel{ - { - CID: "", - MhKey: "", - Topic0s: []string{ - mockTopic11.String(), - }, - Topic1s: []string{ - mockTopic12.String(), - }, - Contract: "", - ContractHash: "", - LogContracts: []string{ - Address.String(), - }, - }, - { - CID: "", - MhKey: "", - Topic0s: []string{ - mockTopic21.String(), - }, - Topic1s: []string{ - mockTopic22.String(), - }, - Contract: "", - ContractHash: "", - LogContracts: []string{ - AnotherAddress.String(), - }, - }, - { - CID: "", - MhKey: "", - Contract: ContractAddress.String(), - ContractHash: ContractHash, - LogContracts: []string{}, - }, - } - MockRctMetaPostPublish = []eth.ReceiptModel{ - { - CID: Rct1CID.String(), - MhKey: Rct1MhKey, - Topic0s: []string{ - mockTopic11.String(), - }, - Topic1s: []string{ - mockTopic12.String(), - }, - Contract: "", - ContractHash: "", - LogContracts: []string{ - Address.String(), - }, - }, - { - CID: Rct2CID.String(), - MhKey: Rct2MhKey, - Topic0s: []string{ - mockTopic21.String(), - }, - Topic1s: []string{ - mockTopic22.String(), - }, - Contract: "", - ContractHash: "", - LogContracts: []string{ - AnotherAddress.String(), - }, - }, - { - CID: Rct3CID.String(), - MhKey: Rct3MhKey, - Contract: ContractAddress.String(), - ContractHash: ContractHash, - LogContracts: []string{}, - }, - } - - // statediff data - storageLocation = common.HexToHash("0") - StorageLeafKey = crypto.Keccak256Hash(storageLocation[:]).Bytes() - StorageValue = common.Hex2Bytes("01") - StoragePartialPath = common.Hex2Bytes("20290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563") - StorageLeafNode, _ = rlp.EncodeToBytes([]interface{}{ - StoragePartialPath, - StorageValue, - }) - - nonce1 = uint64(1) - ContractRoot = "0x821e2556a290c86405f8160a2d662042a431ba456b9db265c79bb837c04be5f0" - ContractCodeHash = common.HexToHash("0x753f98a8d4328b15636e46f66f2cb4bc860100aa17967cc145fcd17d1d4710ea") - contractPath = common.Bytes2Hex([]byte{'\x06'}) - ContractLeafKey = testhelpers.AddressToLeafKey(ContractAddress) - 
ContractAccount, _ = rlp.EncodeToBytes(state.Account{ - Nonce: nonce1, - Balance: big.NewInt(0), - CodeHash: ContractCodeHash.Bytes(), - Root: common.HexToHash(ContractRoot), - }) - ContractPartialPath = common.Hex2Bytes("3114658a74d9cc9f7acf2c5cd696c3494d7c344d78bfec3add0d91ec4e8d1c45") - ContractLeafNode, _ = rlp.EncodeToBytes([]interface{}{ - ContractPartialPath, - ContractAccount, - }) - - nonce0 = uint64(0) - AccountRoot = "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" - AccountCodeHash = common.HexToHash("0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470") - accountPath = common.Bytes2Hex([]byte{'\x0c'}) - AccountAddresss = common.HexToAddress("0x0D3ab14BBaD3D99F4203bd7a11aCB94882050E7e") - AccountLeafKey = testhelpers.Account2LeafKey - Account, _ = rlp.EncodeToBytes(state.Account{ - Nonce: nonce0, - Balance: big.NewInt(1000), - CodeHash: AccountCodeHash.Bytes(), - Root: common.HexToHash(AccountRoot), - }) - AccountPartialPath = common.Hex2Bytes("3957f3e2f04a0764c3a0491b175f69926da61efbcc8f61fa1455fd2d2b4cdd45") - AccountLeafNode, _ = rlp.EncodeToBytes([]interface{}{ - AccountPartialPath, - Account, - }) - - StateDiffs = []statediff.StateNode{ - { - Path: []byte{'\x06'}, - NodeType: statediff.Leaf, - LeafKey: ContractLeafKey, - NodeValue: ContractLeafNode, - StorageNodes: []statediff.StorageNode{ - { - Path: []byte{}, - NodeType: statediff.Leaf, - LeafKey: StorageLeafKey, - NodeValue: StorageLeafNode, - }, - }, - }, - { - Path: []byte{'\x0c'}, - NodeType: statediff.Leaf, - LeafKey: AccountLeafKey, - NodeValue: AccountLeafNode, - StorageNodes: []statediff.StorageNode{}, - }, - } - - MockStateDiff = statediff.StateObject{ - BlockNumber: new(big.Int).Set(BlockNumber), - BlockHash: MockBlock.Hash(), - Nodes: StateDiffs, - } - MockStateDiffBytes, _ = rlp.EncodeToBytes(MockStateDiff) - MockStateNodes = []eth.TrieNode{ - { - LeafKey: common.BytesToHash(ContractLeafKey), - Path: []byte{'\x06'}, - Value: ContractLeafNode, - Type: statediff.Leaf, - }, - { - LeafKey: common.BytesToHash(AccountLeafKey), - Path: []byte{'\x0c'}, - Value: AccountLeafNode, - Type: statediff.Leaf, - }, - } - MockStateMetaPostPublish = []eth.StateNodeModel{ - { - CID: State1CID.String(), - MhKey: State1MhKey, - Path: []byte{'\x06'}, - NodeType: 2, - StateKey: common.BytesToHash(ContractLeafKey).Hex(), - }, - { - CID: State2CID.String(), - MhKey: State2MhKey, - Path: []byte{'\x0c'}, - NodeType: 2, - StateKey: common.BytesToHash(AccountLeafKey).Hex(), - }, - } - MockStorageNodes = map[string][]eth.TrieNode{ - contractPath: { - { - LeafKey: common.BytesToHash(StorageLeafKey), - Value: StorageLeafNode, - Type: statediff.Leaf, - Path: []byte{}, - }, - }, - } - - // aggregate payloads - MockStateDiffPayload = statediff.Payload{ - BlockRlp: MockBlockRlp, - StateObjectRlp: MockStateDiffBytes, - ReceiptsRlp: ReceiptsRlp, - TotalDifficulty: MockBlock.Difficulty(), - } - - MockConvertedPayload = eth.ConvertedPayload{ - TotalDifficulty: MockBlock.Difficulty(), - Block: MockBlock, - Receipts: MockReceipts, - TxMetaData: MockTrxMeta, - ReceiptMetaData: MockRctMeta, - StorageNodes: MockStorageNodes, - StateNodes: MockStateNodes, - } - - MockCIDPayload = ð.CIDPayload{ - HeaderCID: eth.HeaderModel{ - BlockHash: MockBlock.Hash().String(), - BlockNumber: MockBlock.Number().String(), - CID: HeaderCID.String(), - MhKey: HeaderMhKey, - ParentHash: MockBlock.ParentHash().String(), - TotalDifficulty: MockBlock.Difficulty().String(), - Reward: "5000000000000000000", - StateRoot: 
MockBlock.Root().String(), - RctRoot: MockBlock.ReceiptHash().String(), - TxRoot: MockBlock.TxHash().String(), - UncleRoot: MockBlock.UncleHash().String(), - Bloom: MockBlock.Bloom().Bytes(), - Timestamp: MockBlock.Time(), - }, - UncleCIDs: []eth.UncleModel{}, - TransactionCIDs: MockTrxMetaPostPublsh, - ReceiptCIDs: map[common.Hash]eth.ReceiptModel{ - MockTransactions[0].Hash(): MockRctMetaPostPublish[0], - MockTransactions[1].Hash(): MockRctMetaPostPublish[1], - MockTransactions[2].Hash(): MockRctMetaPostPublish[2], - }, - StateNodeCIDs: MockStateMetaPostPublish, - StorageNodeCIDs: map[string][]eth.StorageNodeModel{ - contractPath: { - { - CID: StorageCID.String(), - MhKey: StorageMhKey, - Path: []byte{}, - StorageKey: common.BytesToHash(StorageLeafKey).Hex(), - NodeType: 2, - }, - }, - }, - StateAccounts: map[string]eth.StateAccountModel{ - contractPath: { - Balance: big.NewInt(0).String(), - Nonce: nonce1, - CodeHash: ContractCodeHash.Bytes(), - StorageRoot: common.HexToHash(ContractRoot).String(), - }, - accountPath: { - Balance: big.NewInt(1000).String(), - Nonce: nonce0, - CodeHash: AccountCodeHash.Bytes(), - StorageRoot: common.HexToHash(AccountRoot).String(), - }, - }, - } - - MockCIDWrapper = ð.CIDWrapper{ - BlockNumber: new(big.Int).Set(BlockNumber), - Header: eth.HeaderModel{ - BlockNumber: "1", - BlockHash: MockBlock.Hash().String(), - ParentHash: "0x0000000000000000000000000000000000000000000000000000000000000000", - CID: HeaderCID.String(), - MhKey: HeaderMhKey, - TotalDifficulty: MockBlock.Difficulty().String(), - Reward: "5000000000000000000", - StateRoot: MockBlock.Root().String(), - RctRoot: MockBlock.ReceiptHash().String(), - TxRoot: MockBlock.TxHash().String(), - UncleRoot: MockBlock.UncleHash().String(), - Bloom: MockBlock.Bloom().Bytes(), - Timestamp: MockBlock.Time(), - TimesValidated: 1, - }, - Transactions: MockTrxMetaPostPublsh, - Receipts: MockRctMetaPostPublish, - Uncles: []eth.UncleModel{}, - StateNodes: MockStateMetaPostPublish, - StorageNodes: []eth.StorageNodeWithStateKeyModel{ - { - Path: []byte{}, - CID: StorageCID.String(), - MhKey: StorageMhKey, - NodeType: 2, - StateKey: common.BytesToHash(ContractLeafKey).Hex(), - StorageKey: common.BytesToHash(StorageLeafKey).Hex(), - }, - }, - } - - HeaderIPLD, _ = blocks.NewBlockWithCid(MockHeaderRlp, HeaderCID) - Trx1IPLD, _ = blocks.NewBlockWithCid(MockTransactions.GetRlp(0), Trx1CID) - Trx2IPLD, _ = blocks.NewBlockWithCid(MockTransactions.GetRlp(1), Trx2CID) - Trx3IPLD, _ = blocks.NewBlockWithCid(MockTransactions.GetRlp(2), Trx3CID) - Rct1IPLD, _ = blocks.NewBlockWithCid(MockReceipts.GetRlp(0), Rct1CID) - Rct2IPLD, _ = blocks.NewBlockWithCid(MockReceipts.GetRlp(1), Rct2CID) - Rct3IPLD, _ = blocks.NewBlockWithCid(MockReceipts.GetRlp(2), Rct3CID) - State1IPLD, _ = blocks.NewBlockWithCid(ContractLeafNode, State1CID) - State2IPLD, _ = blocks.NewBlockWithCid(AccountLeafNode, State2CID) - StorageIPLD, _ = blocks.NewBlockWithCid(StorageLeafNode, StorageCID) - - MockIPLDs = eth.IPLDs{ - BlockNumber: new(big.Int).Set(BlockNumber), - Header: ipfs.BlockModel{ - Data: HeaderIPLD.RawData(), - CID: HeaderIPLD.Cid().String(), - }, - Transactions: []ipfs.BlockModel{ - { - Data: Trx1IPLD.RawData(), - CID: Trx1IPLD.Cid().String(), - }, - { - Data: Trx2IPLD.RawData(), - CID: Trx2IPLD.Cid().String(), - }, - { - Data: Trx3IPLD.RawData(), - CID: Trx3IPLD.Cid().String(), - }, - }, - Receipts: []ipfs.BlockModel{ - { - Data: Rct1IPLD.RawData(), - CID: Rct1IPLD.Cid().String(), - }, - { - Data: Rct2IPLD.RawData(), - CID: 
Rct2IPLD.Cid().String(), - }, - { - Data: Rct3IPLD.RawData(), - CID: Rct3IPLD.Cid().String(), - }, - }, - StateNodes: []eth.StateNode{ - { - StateLeafKey: common.BytesToHash(ContractLeafKey), - Type: statediff.Leaf, - IPLD: ipfs.BlockModel{ - Data: State1IPLD.RawData(), - CID: State1IPLD.Cid().String(), - }, - Path: []byte{'\x06'}, - }, - { - StateLeafKey: common.BytesToHash(AccountLeafKey), - Type: statediff.Leaf, - IPLD: ipfs.BlockModel{ - Data: State2IPLD.RawData(), - CID: State2IPLD.Cid().String(), - }, - Path: []byte{'\x0c'}, - }, - }, - StorageNodes: []eth.StorageNode{ - { - StateLeafKey: common.BytesToHash(ContractLeafKey), - StorageLeafKey: common.BytesToHash(StorageLeafKey), - Type: statediff.Leaf, - IPLD: ipfs.BlockModel{ - Data: StorageIPLD.RawData(), - CID: StorageIPLD.Cid().String(), - }, - Path: []byte{}, - }, - }, - } -) - -// createTransactionsAndReceipts is a helper function to generate signed mock transactions and mock receipts with mock logs -func createTransactionsAndReceipts() (types.Transactions, types.Receipts, common.Address) { - // make transactions - trx1 := types.NewTransaction(0, Address, big.NewInt(1000), 50, big.NewInt(100), []byte{}) - trx2 := types.NewTransaction(1, AnotherAddress, big.NewInt(2000), 100, big.NewInt(200), []byte{}) - trx3 := types.NewContractCreation(2, big.NewInt(1500), 75, big.NewInt(150), MockContractByteCode) - transactionSigner := types.MakeSigner(params.MainnetChainConfig, new(big.Int).Set(BlockNumber)) - mockCurve := elliptic.P256() - mockPrvKey, err := ecdsa.GenerateKey(mockCurve, rand.Reader) - if err != nil { - log.Fatal(err) - } - signedTrx1, err := types.SignTx(trx1, transactionSigner, mockPrvKey) - if err != nil { - log.Fatal(err) - } - signedTrx2, err := types.SignTx(trx2, transactionSigner, mockPrvKey) - if err != nil { - log.Fatal(err) - } - signedTrx3, err := types.SignTx(trx3, transactionSigner, mockPrvKey) - if err != nil { - log.Fatal(err) - } - SenderAddr, err := types.Sender(transactionSigner, signedTrx1) // same for both trx - if err != nil { - log.Fatal(err) - } - // make receipts - mockReceipt1 := types.NewReceipt(common.HexToHash("0x0").Bytes(), false, 50) - mockReceipt1.Logs = []*types.Log{MockLog1} - mockReceipt1.TxHash = signedTrx1.Hash() - mockReceipt2 := types.NewReceipt(common.HexToHash("0x1").Bytes(), false, 100) - mockReceipt2.Logs = []*types.Log{MockLog2} - mockReceipt2.TxHash = signedTrx2.Hash() - mockReceipt3 := types.NewReceipt(common.HexToHash("0x2").Bytes(), false, 75) - mockReceipt3.Logs = []*types.Log{} - mockReceipt3.TxHash = signedTrx3.Hash() - return types.Transactions{signedTrx1, signedTrx2, signedTrx3}, types.Receipts{mockReceipt1, mockReceipt2, mockReceipt3}, SenderAddr -} diff --git a/pkg/eth/models.go b/pkg/eth/models.go deleted file mode 100644 index 6ded3a67..00000000 --- a/pkg/eth/models.go +++ /dev/null @@ -1,126 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. 
If not, see . - -package eth - -import "github.com/lib/pq" - -// HeaderModel is the db model for eth.header_cids -type HeaderModel struct { - ID int64 `db:"id"` - BlockNumber string `db:"block_number"` - BlockHash string `db:"block_hash"` - ParentHash string `db:"parent_hash"` - CID string `db:"cid"` - MhKey string `db:"mh_key"` - TotalDifficulty string `db:"td"` - NodeID int64 `db:"node_id"` - Reward string `db:"reward"` - StateRoot string `db:"state_root"` - UncleRoot string `db:"uncle_root"` - TxRoot string `db:"tx_root"` - RctRoot string `db:"receipt_root"` - Bloom []byte `db:"bloom"` - Timestamp uint64 `db:"timestamp"` - TimesValidated int64 `db:"times_validated"` -} - -// UncleModel is the db model for eth.uncle_cids -type UncleModel struct { - ID int64 `db:"id"` - HeaderID int64 `db:"header_id"` - BlockHash string `db:"block_hash"` - ParentHash string `db:"parent_hash"` - CID string `db:"cid"` - MhKey string `db:"mh_key"` - Reward string `db:"reward"` -} - -// TxModel is the db model for eth.transaction_cids -type TxModel struct { - ID int64 `db:"id"` - HeaderID int64 `db:"header_id"` - Index int64 `db:"index"` - TxHash string `db:"tx_hash"` - CID string `db:"cid"` - MhKey string `db:"mh_key"` - Dst string `db:"dst"` - Src string `db:"src"` - Data []byte `db:"tx_data"` - Deployment bool `db:"deployment"` -} - -// ReceiptModel is the db model for eth.receipt_cids -type ReceiptModel struct { - ID int64 `db:"id"` - TxID int64 `db:"tx_id"` - CID string `db:"cid"` - MhKey string `db:"mh_key"` - Contract string `db:"contract"` - ContractHash string `db:"contract_hash"` - LogContracts pq.StringArray `db:"log_contracts"` - Topic0s pq.StringArray `db:"topic0s"` - Topic1s pq.StringArray `db:"topic1s"` - Topic2s pq.StringArray `db:"topic2s"` - Topic3s pq.StringArray `db:"topic3s"` -} - -// StateNodeModel is the db model for eth.state_cids -type StateNodeModel struct { - ID int64 `db:"id"` - HeaderID int64 `db:"header_id"` - Path []byte `db:"state_path"` - StateKey string `db:"state_leaf_key"` - NodeType int `db:"node_type"` - CID string `db:"cid"` - MhKey string `db:"mh_key"` - Diff bool `db:"diff"` -} - -// StorageNodeModel is the db model for eth.storage_cids -type StorageNodeModel struct { - ID int64 `db:"id"` - StateID int64 `db:"state_id"` - Path []byte `db:"storage_path"` - StorageKey string `db:"storage_leaf_key"` - NodeType int `db:"node_type"` - CID string `db:"cid"` - MhKey string `db:"mh_key"` - Diff bool `db:"diff"` -} - -// StorageNodeWithStateKeyModel is a db model for eth.storage_cids + eth.state_cids.state_key -type StorageNodeWithStateKeyModel struct { - ID int64 `db:"id"` - StateID int64 `db:"state_id"` - Path []byte `db:"storage_path"` - StateKey string `db:"state_leaf_key"` - StorageKey string `db:"storage_leaf_key"` - NodeType int `db:"node_type"` - CID string `db:"cid"` - MhKey string `db:"mh_key"` - Diff bool `db:"diff"` -} - -// StateAccountModel is a db model for an eth state account (decoded value of state leaf node) -type StateAccountModel struct { - ID int64 `db:"id"` - StateID int64 `db:"state_id"` - Balance string `db:"balance"` - Nonce uint64 `db:"nonce"` - CodeHash []byte `db:"code_hash"` - StorageRoot string `db:"storage_root"` -} diff --git a/pkg/eth/payload_fetcher.go b/pkg/eth/payload_fetcher.go deleted file mode 100644 index ebc0d5a4..00000000 --- a/pkg/eth/payload_fetcher.go +++ /dev/null @@ -1,88 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the 
GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package eth - -import ( - "context" - "fmt" - "time" - - "github.com/ethereum/go-ethereum/rpc" - "github.com/ethereum/go-ethereum/statediff" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -// BatchClient is an interface to a batch-fetching geth rpc client; created to allow mock insertion -type BatchClient interface { - BatchCallContext(ctx context.Context, batch []rpc.BatchElem) error -} - -// PayloadFetcher satisfies the PayloadFetcher interface for ethereum -type PayloadFetcher struct { - // PayloadFetcher is thread-safe as long as the underlying client is thread-safe, since it has/modifies no other state - // http.Client is thread-safe - client BatchClient - timeout time.Duration - params statediff.Params -} - -const method = "statediff_stateDiffAt" - -// NewPayloadFetcher returns a PayloadFetcher -func NewPayloadFetcher(bc BatchClient, timeout time.Duration) *PayloadFetcher { - return &PayloadFetcher{ - client: bc, - timeout: timeout, - params: statediff.Params{ - IncludeReceipts: true, - IncludeTD: true, - IncludeBlock: true, - IntermediateStateNodes: true, - IntermediateStorageNodes: true, - }, - } -} - -// FetchAt fetches the statediff payloads at the given block heights -// Calls StateDiffAt(ctx context.Context, blockNumber uint64, params Params) (*Payload, error) -func (fetcher *PayloadFetcher) FetchAt(blockHeights []uint64) ([]shared.RawChainData, error) { - batch := make([]rpc.BatchElem, 0) - for _, height := range blockHeights { - batch = append(batch, rpc.BatchElem{ - Method: method, - Args: []interface{}{height, fetcher.params}, - Result: new(statediff.Payload), - }) - } - ctx, cancel := context.WithTimeout(context.Background(), fetcher.timeout) - defer cancel() - if err := fetcher.client.BatchCallContext(ctx, batch); err != nil { - return nil, fmt.Errorf("ethereum PayloadFetcher batch err for block range %d-%d: %s", blockHeights[0], blockHeights[len(blockHeights)-1], err.Error()) - } - results := make([]shared.RawChainData, 0, len(blockHeights)) - for _, batchElem := range batch { - if batchElem.Error != nil { - return nil, fmt.Errorf("ethereum PayloadFetcher err at blockheight %d: %s", batchElem.Args[0].(uint64), batchElem.Error.Error()) - } - payload, ok := batchElem.Result.(*statediff.Payload) - if ok { - results = append(results, *payload) - } - } - return results, nil -} diff --git a/pkg/eth/payload_fetcher_test.go b/pkg/eth/payload_fetcher_test.go deleted file mode 100644 index ac0ed1e0..00000000 --- a/pkg/eth/payload_fetcher_test.go +++ /dev/null @@ -1,65 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package eth_test - -import ( - "time" - - "github.com/ethereum/go-ethereum/statediff" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth/mocks" -) - -var _ = Describe("StateDiffFetcher", func() { - Describe("FetchStateDiffsAt", func() { - var ( - mc *mocks.BackFillerClient - stateDiffFetcher *eth.PayloadFetcher - payload2 statediff.Payload - blockNumber2 uint64 - ) - BeforeEach(func() { - mc = new(mocks.BackFillerClient) - err := mc.SetReturnDiffAt(mocks.BlockNumber.Uint64(), mocks.MockStateDiffPayload) - Expect(err).ToNot(HaveOccurred()) - payload2 = mocks.MockStateDiffPayload - payload2.BlockRlp = []byte{} - blockNumber2 = mocks.BlockNumber.Uint64() + 1 - err = mc.SetReturnDiffAt(blockNumber2, payload2) - Expect(err).ToNot(HaveOccurred()) - stateDiffFetcher = eth.NewPayloadFetcher(mc, time.Second*60) - }) - It("Batch calls statediff_stateDiffAt", func() { - blockHeights := []uint64{ - mocks.BlockNumber.Uint64(), - blockNumber2, - } - stateDiffPayloads, err := stateDiffFetcher.FetchAt(blockHeights) - Expect(err).ToNot(HaveOccurred()) - Expect(len(stateDiffPayloads)).To(Equal(2)) - payload1, ok := stateDiffPayloads[0].(statediff.Payload) - Expect(ok).To(BeTrue()) - fetchedPayload2, ok := stateDiffPayloads[1].(statediff.Payload) - Expect(ok).To(BeTrue()) - Expect(payload1).To(Equal(mocks.MockStateDiffPayload)) - Expect(fetchedPayload2).To(Equal(payload2)) - }) - }) -}) diff --git a/pkg/eth/publisher.go b/pkg/eth/publisher.go deleted file mode 100644 index e0eaa215..00000000 --- a/pkg/eth/publisher.go +++ /dev/null @@ -1,228 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see .
- -package eth - -import ( - "fmt" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/state" - "github.com/ethereum/go-ethereum/rlp" - "github.com/ethereum/go-ethereum/statediff" - "github.com/jmoiron/sqlx" - "github.com/multiformats/go-multihash" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs/ipld" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -// IPLDPublisher satisfies the IPLDPublisher interface for ethereum -// It interfaces directly with the public.blocks table of PG-IPFS rather than going through an ipfs intermediary -// It publishes and indexes IPLDs together in a single sqlx.Tx -type IPLDPublisher struct { - indexer *CIDIndexer -} - -// NewIPLDPublisher creates a pointer to a new IPLDPublisher which satisfies the IPLDPublisher interface -func NewIPLDPublisher(db *postgres.DB) *IPLDPublisher { - return &IPLDPublisher{ - indexer: NewCIDIndexer(db), - } -} - -// Publish publishes an IPLDPayload to IPFS and returns the corresponding CIDPayload -func (pub *IPLDPublisher) Publish(payload shared.ConvertedData) error { - ipldPayload, ok := payload.(ConvertedPayload) - if !ok { - return fmt.Errorf("eth IPLDPublisher expected payload type %T got %T", ConvertedPayload{}, payload) - } - // Generate the iplds - headerNode, uncleNodes, txNodes, txTrieNodes, rctNodes, rctTrieNodes, err := ipld.FromBlockAndReceipts(ipldPayload.Block, ipldPayload.Receipts) - if err != nil { - return err - } - - // Begin new db tx - tx, err := pub.indexer.db.Beginx() - if err != nil { - return err - } - defer func() { - if p := recover(); p != nil { - shared.Rollback(tx) - panic(p) - } else if err != nil { - shared.Rollback(tx) - } else { - err = tx.Commit() - } - }() - - // Publish trie nodes - for _, node := range txTrieNodes { - if err := shared.PublishIPLD(tx, node); err != nil { - return err - } - } - for _, node := range rctTrieNodes { - if err := shared.PublishIPLD(tx, node); err != nil { - return err - } - } - - // Publish and index header - if err := shared.PublishIPLD(tx, headerNode); err != nil { - return err - } - reward := CalcEthBlockReward(ipldPayload.Block.Header(), ipldPayload.Block.Uncles(), ipldPayload.Block.Transactions(), ipldPayload.Receipts) - header := HeaderModel{ - CID: headerNode.Cid().String(), - MhKey: shared.MultihashKeyFromCID(headerNode.Cid()), - ParentHash: ipldPayload.Block.ParentHash().String(), - BlockNumber: ipldPayload.Block.Number().String(), - BlockHash: ipldPayload.Block.Hash().String(), - TotalDifficulty: ipldPayload.TotalDifficulty.String(), - Reward: reward.String(), - Bloom: ipldPayload.Block.Bloom().Bytes(), - StateRoot: ipldPayload.Block.Root().String(), - RctRoot: ipldPayload.Block.ReceiptHash().String(), - TxRoot: ipldPayload.Block.TxHash().String(), - UncleRoot: ipldPayload.Block.UncleHash().String(), - Timestamp: ipldPayload.Block.Time(), - } - headerID, err := pub.indexer.indexHeaderCID(tx, header) - if err != nil { - return err - } - - // Publish and index uncles - for _, uncleNode := range uncleNodes { - if err := shared.PublishIPLD(tx, uncleNode); err != nil { - return err - } - uncleReward := CalcUncleMinerReward(ipldPayload.Block.Number().Int64(), uncleNode.Number.Int64()) - uncle := UncleModel{ - CID: uncleNode.Cid().String(), - MhKey: shared.MultihashKeyFromCID(uncleNode.Cid()), - ParentHash: uncleNode.ParentHash.String(), - BlockHash: uncleNode.Hash().String(), - Reward: uncleReward.String(), - } - if err := 
pub.indexer.indexUncleCID(tx, uncle, headerID); err != nil { - return err - } - } - - // Publish and index txs and receipts - for i, txNode := range txNodes { - if err := shared.PublishIPLD(tx, txNode); err != nil { - return err - } - rctNode := rctNodes[i] - if err := shared.PublishIPLD(tx, rctNode); err != nil { - return err - } - txModel := ipldPayload.TxMetaData[i] - txModel.CID = txNode.Cid().String() - txModel.MhKey = shared.MultihashKeyFromCID(txNode.Cid()) - txID, err := pub.indexer.indexTransactionCID(tx, txModel, headerID) - if err != nil { - return err - } - // If tx is a contract deployment, publish the data (code) - if txModel.Deployment { // codec doesn't matter in this case since we are not interested in the cid and the db key is multihash-derived - if _, err = shared.PublishRaw(tx, ipld.MEthStorageTrie, multihash.KECCAK_256, txModel.Data); err != nil { - return err - } - } - rctModel := ipldPayload.ReceiptMetaData[i] - rctModel.CID = rctNode.Cid().String() - rctModel.MhKey = shared.MultihashKeyFromCID(rctNode.Cid()) - if err := pub.indexer.indexReceiptCID(tx, rctModel, txID); err != nil { - return err - } - } - - // Publish and index state and storage - err = pub.publishAndIndexStateAndStorage(tx, ipldPayload, headerID) - - return err // return err variable explicitly so that we return the err = tx.Commit() assignment in the defer -} - -func (pub *IPLDPublisher) publishAndIndexStateAndStorage(tx *sqlx.Tx, ipldPayload ConvertedPayload, headerID int64) error { - // Publish and index state and storage - for _, stateNode := range ipldPayload.StateNodes { - stateCIDStr, err := shared.PublishRaw(tx, ipld.MEthStateTrie, multihash.KECCAK_256, stateNode.Value) - if err != nil { - return err - } - mhKey, _ := shared.MultihashKeyFromCIDString(stateCIDStr) - stateModel := StateNodeModel{ - Path: stateNode.Path, - StateKey: stateNode.LeafKey.String(), - CID: stateCIDStr, - MhKey: mhKey, - NodeType: ResolveFromNodeType(stateNode.Type), - } - stateID, err := pub.indexer.indexStateCID(tx, stateModel, headerID) - if err != nil { - return err - } - // If we have a leaf, decode and index the account data and any associated storage diffs - if stateNode.Type == statediff.Leaf { - var i []interface{} - if err := rlp.DecodeBytes(stateNode.Value, &i); err != nil { - return err - } - if len(i) != 2 { - return fmt.Errorf("eth IPLDPublisher expected state leaf node rlp to decode into two elements") - } - var account state.Account - if err := rlp.DecodeBytes(i[1].([]byte), &account); err != nil { - return err - } - accountModel := StateAccountModel{ - Balance: account.Balance.String(), - Nonce: account.Nonce, - CodeHash: account.CodeHash, - StorageRoot: account.Root.String(), - } - if err := pub.indexer.indexStateAccount(tx, accountModel, stateID); err != nil { - return err - } - for _, storageNode := range ipldPayload.StorageNodes[common.Bytes2Hex(stateNode.Path)] { - storageCIDStr, err := shared.PublishRaw(tx, ipld.MEthStorageTrie, multihash.KECCAK_256, storageNode.Value) - if err != nil { - return err - } - mhKey, _ := shared.MultihashKeyFromCIDString(storageCIDStr) - storageModel := StorageNodeModel{ - Path: storageNode.Path, - StorageKey: storageNode.LeafKey.Hex(), - CID: storageCIDStr, - MhKey: mhKey, - NodeType: ResolveFromNodeType(storageNode.Type), - } - if err := pub.indexer.indexStorageCID(tx, storageModel, stateID); err != nil { - return err - } - } - } - } - return nil -} diff --git a/pkg/eth/publisher_test.go b/pkg/eth/publisher_test.go deleted file mode 100644 index 559e9b5a..00000000
--- a/pkg/eth/publisher_test.go +++ /dev/null @@ -1,233 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package eth_test - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/ipfs/go-cid" - "github.com/ipfs/go-ipfs-blockstore" - "github.com/ipfs/go-ipfs-ds-help" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth/mocks" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -var _ = Describe("PublishAndIndexer", func() { - var ( - db *postgres.DB - err error - repo *eth.IPLDPublisher - ipfsPgGet = `SELECT data FROM public.blocks - WHERE key = $1` - ) - BeforeEach(func() { - db, err = shared.SetupDB() - Expect(err).ToNot(HaveOccurred()) - repo = eth.NewIPLDPublisher(db) - }) - AfterEach(func() { - eth.TearDownDB(db) - }) - - Describe("Publish", func() { - It("Published and indexes header IPLDs in a single tx", func() { - err = repo.Publish(mocks.MockConvertedPayload) - Expect(err).ToNot(HaveOccurred()) - pgStr := `SELECT cid, td, reward, id - FROM eth.header_cids - WHERE block_number = $1` - // check header was properly indexed - type res struct { - CID string - TD string - Reward string - ID int - } - header := new(res) - err = db.QueryRowx(pgStr, 1).StructScan(header) - Expect(err).ToNot(HaveOccurred()) - Expect(header.CID).To(Equal(mocks.HeaderCID.String())) - Expect(header.TD).To(Equal(mocks.MockBlock.Difficulty().String())) - Expect(header.Reward).To(Equal("5000000000000000000")) - dc, err := cid.Decode(header.CID) - Expect(err).ToNot(HaveOccurred()) - mhKey := dshelp.MultihashToDsKey(dc.Hash()) - prefixedKey := blockstore.BlockPrefix.String() + mhKey.String() - var data []byte - err = db.Get(&data, ipfsPgGet, prefixedKey) - Expect(err).ToNot(HaveOccurred()) - Expect(data).To(Equal(mocks.MockHeaderRlp)) - }) - - It("Publishes and indexes transaction IPLDs in a single tx", func() { - err = repo.Publish(mocks.MockConvertedPayload) - Expect(err).ToNot(HaveOccurred()) - // check that txs were properly indexed - trxs := make([]string, 0) - pgStr := `SELECT transaction_cids.cid FROM eth.transaction_cids INNER JOIN eth.header_cids ON (transaction_cids.header_id = header_cids.id) - WHERE header_cids.block_number = $1` - err = db.Select(&trxs, pgStr, 1) - Expect(err).ToNot(HaveOccurred()) - Expect(len(trxs)).To(Equal(3)) - Expect(shared.ListContainsString(trxs, mocks.Trx1CID.String())).To(BeTrue()) - Expect(shared.ListContainsString(trxs, mocks.Trx2CID.String())).To(BeTrue()) - Expect(shared.ListContainsString(trxs, mocks.Trx3CID.String())).To(BeTrue()) - // and published - for _, c := range trxs { - dc, err := cid.Decode(c) - Expect(err).ToNot(HaveOccurred()) - mhKey := dshelp.MultihashToDsKey(dc.Hash()) - prefixedKey := 
blockstore.BlockPrefix.String() + mhKey.String() - var data []byte - err = db.Get(&data, ipfsPgGet, prefixedKey) - Expect(err).ToNot(HaveOccurred()) - switch c { - case mocks.Trx1CID.String(): - Expect(data).To(Equal(mocks.MockTransactions.GetRlp(0))) - case mocks.Trx2CID.String(): - Expect(data).To(Equal(mocks.MockTransactions.GetRlp(1))) - case mocks.Trx3CID.String(): - Expect(data).To(Equal(mocks.MockTransactions.GetRlp(2))) - } - } - }) - - It("Publishes and indexes receipt IPLDs in a single tx", func() { - err = repo.Publish(mocks.MockConvertedPayload) - Expect(err).ToNot(HaveOccurred()) - // check receipts were properly indexed - rcts := make([]string, 0) - pgStr := `SELECT receipt_cids.cid FROM eth.receipt_cids, eth.transaction_cids, eth.header_cids - WHERE receipt_cids.tx_id = transaction_cids.id - AND transaction_cids.header_id = header_cids.id - AND header_cids.block_number = $1` - err = db.Select(&rcts, pgStr, 1) - Expect(err).ToNot(HaveOccurred()) - Expect(len(rcts)).To(Equal(3)) - Expect(shared.ListContainsString(rcts, mocks.Rct1CID.String())).To(BeTrue()) - Expect(shared.ListContainsString(rcts, mocks.Rct2CID.String())).To(BeTrue()) - Expect(shared.ListContainsString(rcts, mocks.Rct3CID.String())).To(BeTrue()) - // and published - for _, c := range rcts { - dc, err := cid.Decode(c) - Expect(err).ToNot(HaveOccurred()) - mhKey := dshelp.MultihashToDsKey(dc.Hash()) - prefixedKey := blockstore.BlockPrefix.String() + mhKey.String() - var data []byte - err = db.Get(&data, ipfsPgGet, prefixedKey) - Expect(err).ToNot(HaveOccurred()) - switch c { - case mocks.Rct1CID.String(): - Expect(data).To(Equal(mocks.MockReceipts.GetRlp(0))) - case mocks.Rct2CID.String(): - Expect(data).To(Equal(mocks.MockReceipts.GetRlp(1))) - case mocks.Rct3CID.String(): - Expect(data).To(Equal(mocks.MockReceipts.GetRlp(2))) - } - } - }) - - It("Publishes and indexes state IPLDs in a single tx", func() { - err = repo.Publish(mocks.MockConvertedPayload) - Expect(err).ToNot(HaveOccurred()) - // check that state nodes were properly indexed and published - stateNodes := make([]eth.StateNodeModel, 0) - pgStr := `SELECT state_cids.id, state_cids.cid, state_cids.state_leaf_key, state_cids.node_type, state_cids.state_path, state_cids.header_id - FROM eth.state_cids INNER JOIN eth.header_cids ON (state_cids.header_id = header_cids.id) - WHERE header_cids.block_number = $1` - err = db.Select(&stateNodes, pgStr, 1) - Expect(err).ToNot(HaveOccurred()) - Expect(len(stateNodes)).To(Equal(2)) - for _, stateNode := range stateNodes { - var data []byte - dc, err := cid.Decode(stateNode.CID) - Expect(err).ToNot(HaveOccurred()) - mhKey := dshelp.MultihashToDsKey(dc.Hash()) - prefixedKey := blockstore.BlockPrefix.String() + mhKey.String() - err = db.Get(&data, ipfsPgGet, prefixedKey) - Expect(err).ToNot(HaveOccurred()) - pgStr = `SELECT * from eth.state_accounts WHERE state_id = $1` - var account eth.StateAccountModel - err = db.Get(&account, pgStr, stateNode.ID) - Expect(err).ToNot(HaveOccurred()) - if stateNode.CID == mocks.State1CID.String() { - Expect(stateNode.NodeType).To(Equal(2)) - Expect(stateNode.StateKey).To(Equal(common.BytesToHash(mocks.ContractLeafKey).Hex())) - Expect(stateNode.Path).To(Equal([]byte{'\x06'})) - Expect(data).To(Equal(mocks.ContractLeafNode)) - Expect(account).To(Equal(eth.StateAccountModel{ - ID: account.ID, - StateID: stateNode.ID, - Balance: "0", - CodeHash: mocks.ContractCodeHash.Bytes(), - StorageRoot: mocks.ContractRoot, - Nonce: 1, - })) - } - if stateNode.CID == mocks.State2CID.String() { - 
Expect(stateNode.NodeType).To(Equal(2)) - Expect(stateNode.StateKey).To(Equal(common.BytesToHash(mocks.AccountLeafKey).Hex())) - Expect(stateNode.Path).To(Equal([]byte{'\x0c'})) - Expect(data).To(Equal(mocks.AccountLeafNode)) - Expect(account).To(Equal(eth.StateAccountModel{ - ID: account.ID, - StateID: stateNode.ID, - Balance: "1000", - CodeHash: mocks.AccountCodeHash.Bytes(), - StorageRoot: mocks.AccountRoot, - Nonce: 0, - })) - } - } - pgStr = `SELECT * from eth.state_accounts WHERE state_id = $1` - }) - - It("Publishes and indexes storage IPLDs in a single tx", func() { - err = repo.Publish(mocks.MockConvertedPayload) - Expect(err).ToNot(HaveOccurred()) - // check that storage nodes were properly indexed - storageNodes := make([]eth.StorageNodeWithStateKeyModel, 0) - pgStr := `SELECT storage_cids.cid, state_cids.state_leaf_key, storage_cids.storage_leaf_key, storage_cids.node_type, storage_cids.storage_path - FROM eth.storage_cids, eth.state_cids, eth.header_cids - WHERE storage_cids.state_id = state_cids.id - AND state_cids.header_id = header_cids.id - AND header_cids.block_number = $1` - err = db.Select(&storageNodes, pgStr, 1) - Expect(err).ToNot(HaveOccurred()) - Expect(len(storageNodes)).To(Equal(1)) - Expect(storageNodes[0]).To(Equal(eth.StorageNodeWithStateKeyModel{ - CID: mocks.StorageCID.String(), - NodeType: 2, - StorageKey: common.BytesToHash(mocks.StorageLeafKey).Hex(), - StateKey: common.BytesToHash(mocks.ContractLeafKey).Hex(), - Path: []byte{}, - })) - var data []byte - dc, err := cid.Decode(storageNodes[0].CID) - Expect(err).ToNot(HaveOccurred()) - mhKey := dshelp.MultihashToDsKey(dc.Hash()) - prefixedKey := blockstore.BlockPrefix.String() + mhKey.String() - err = db.Get(&data, ipfsPgGet, prefixedKey) - Expect(err).ToNot(HaveOccurred()) - Expect(data).To(Equal(mocks.StorageLeafNode)) - }) - }) -}) diff --git a/pkg/eth/reward.go b/pkg/eth/reward.go deleted file mode 100644 index 3949933d..00000000 --- a/pkg/eth/reward.go +++ /dev/null @@ -1,76 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
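The block lookups in these specs all derive the public.blocks key the same way: take the CID's multihash, convert it to a datastore key, and prepend the blockstore prefix. A standalone sketch of that derivation (the sample payload bytes and codec are arbitrary):

    package main

    import (
        "fmt"

        "github.com/ipfs/go-cid"
        blockstore "github.com/ipfs/go-ipfs-blockstore"
        dshelp "github.com/ipfs/go-ipfs-ds-help"
        mh "github.com/multiformats/go-multihash"
    )

    func main() {
        // Build a CID for some arbitrary bytes, then derive the key under which
        // PG-IPFS stores the block in public.blocks.
        hash, err := mh.Sum([]byte("example block"), mh.SHA2_256, -1)
        if err != nil {
            panic(err)
        }
        c := cid.NewCidV1(cid.DagCBOR, hash)
        mhKey := dshelp.MultihashToDsKey(c.Hash())
        prefixedKey := blockstore.BlockPrefix.String() + mhKey.String()
        fmt.Println(prefixedKey) // e.g. /blocks/<base32 multihash>
    }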
- -package eth - -import ( - "math/big" - - "github.com/ethereum/go-ethereum/core/types" -) - -func CalcEthBlockReward(header *types.Header, uncles []*types.Header, txs types.Transactions, receipts types.Receipts) *big.Int { - staticBlockReward := staticRewardByBlockNumber(header.Number.Int64()) - transactionFees := calcEthTransactionFees(txs, receipts) - uncleInclusionRewards := calcEthUncleInclusionRewards(header, uncles) - tmp := transactionFees.Add(transactionFees, uncleInclusionRewards) - return tmp.Add(tmp, staticBlockReward) -} - -func CalcUncleMinerReward(blockNumber, uncleBlockNumber int64) *big.Int { - staticBlockReward := staticRewardByBlockNumber(blockNumber) - rewardDiv8 := staticBlockReward.Div(staticBlockReward, big.NewInt(8)) - mainBlock := big.NewInt(blockNumber) - uncleBlock := big.NewInt(uncleBlockNumber) - uncleBlockPlus8 := uncleBlock.Add(uncleBlock, big.NewInt(8)) - uncleBlockPlus8MinusMainBlock := uncleBlockPlus8.Sub(uncleBlockPlus8, mainBlock) - return rewardDiv8.Mul(rewardDiv8, uncleBlockPlus8MinusMainBlock) -} - -func staticRewardByBlockNumber(blockNumber int64) *big.Int { - staticBlockReward := new(big.Int) - //https://blog.ethereum.org/2017/10/12/byzantium-hf-announcement/ - if blockNumber >= 7280000 { - staticBlockReward.SetString("2000000000000000000", 10) - } else if blockNumber >= 4370000 { - staticBlockReward.SetString("3000000000000000000", 10) - } else { - staticBlockReward.SetString("5000000000000000000", 10) - } - return staticBlockReward -} - -func calcEthTransactionFees(txs types.Transactions, receipts types.Receipts) *big.Int { - transactionFees := new(big.Int) - for i, transaction := range txs { - receipt := receipts[i] - gasPrice := big.NewInt(transaction.GasPrice().Int64()) - gasUsed := big.NewInt(int64(receipt.GasUsed)) - transactionFee := gasPrice.Mul(gasPrice, gasUsed) - transactionFees = transactionFees.Add(transactionFees, transactionFee) - } - return transactionFees -} - -func calcEthUncleInclusionRewards(header *types.Header, uncles []*types.Header) *big.Int { - uncleInclusionRewards := new(big.Int) - for range uncles { - staticBlockReward := staticRewardByBlockNumber(header.Number.Int64()) - staticBlockReward.Div(staticBlockReward, big.NewInt(32)) - uncleInclusionRewards.Add(uncleInclusionRewards, staticBlockReward) - } - return uncleInclusionRewards -} diff --git a/pkg/eth/streamer.go b/pkg/eth/streamer.go deleted file mode 100644 index 2ba28e63..00000000 --- a/pkg/eth/streamer.go +++ /dev/null @@ -1,72 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
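The reward helpers just removed encode the Frontier, Byzantium, and Constantinople static reward schedule plus the uncle formula staticReward / 8 * (uncleHeight + 8 - blockHeight). A self-contained sketch of those two pieces, with the example heights chosen purely for illustration:

    package main

    import (
        "fmt"
        "math/big"
    )

    // staticReward mirrors staticRewardByBlockNumber above: 5 ETH before Byzantium
    // (4,370,000), 3 ETH before Constantinople (7,280,000), 2 ETH afterwards.
    func staticReward(blockNumber int64) *big.Int {
        reward := new(big.Int)
        switch {
        case blockNumber >= 7280000:
            reward.SetString("2000000000000000000", 10)
        case blockNumber >= 4370000:
            reward.SetString("3000000000000000000", 10)
        default:
            reward.SetString("5000000000000000000", 10)
        }
        return reward
    }

    // uncleMinerReward mirrors CalcUncleMinerReward above.
    func uncleMinerReward(blockNumber, uncleBlockNumber int64) *big.Int {
        reward := staticReward(blockNumber)
        reward.Div(reward, big.NewInt(8))
        return reward.Mul(reward, big.NewInt(uncleBlockNumber+8-blockNumber))
    }

    func main() {
        // An uncle mined at 4,370,001 and included at 4,370,003 earns
        // 3 ETH / 8 * 6 = 2.25 ETH (2250000000000000000 wei).
        fmt.Println(uncleMinerReward(4370003, 4370001))
    }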
- -package eth - -import ( - "context" - - "github.com/ethereum/go-ethereum/rpc" - "github.com/ethereum/go-ethereum/statediff" - "github.com/sirupsen/logrus" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -const ( - PayloadChanBufferSize = 20000 // the max eth sub buffer size -) - -// StreamClient is an interface for subscribing and streaming from geth -type StreamClient interface { - Subscribe(ctx context.Context, namespace string, payloadChan interface{}, args ...interface{}) (*rpc.ClientSubscription, error) -} - -// PayloadStreamer satisfies the PayloadStreamer interface for ethereum -type PayloadStreamer struct { - Client StreamClient - params statediff.Params -} - -// NewPayloadStreamer creates a pointer to a new PayloadStreamer which satisfies the PayloadStreamer interface for ethereum -func NewPayloadStreamer(client StreamClient) *PayloadStreamer { - return &PayloadStreamer{ - Client: client, - params: statediff.Params{ - IncludeBlock: true, - IncludeTD: true, - IncludeReceipts: true, - IntermediateStorageNodes: true, - IntermediateStateNodes: true, - }, - } -} - -// Stream is the main loop for subscribing to data from the Geth state diff process -// Satisfies the shared.PayloadStreamer interface -func (ps *PayloadStreamer) Stream(payloadChan chan shared.RawChainData) (shared.ClientSubscription, error) { - stateDiffChan := make(chan statediff.Payload, PayloadChanBufferSize) - logrus.Debug("streaming diffs from geth") - go func() { - for { - select { - case payload := <-stateDiffChan: - payloadChan <- payload - } - } - }() - return ps.Client.Subscribe(context.Background(), "statediff", stateDiffChan, "stream", ps.params) -} diff --git a/pkg/eth/streamer_test.go b/pkg/eth/streamer_test.go deleted file mode 100644 index bf5f7c27..00000000 --- a/pkg/eth/streamer_test.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2019 Vulcanize -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package eth_test - -import ( - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth/mocks" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" -) - -var _ = Describe("StateDiff Streamer", func() { - It("subscribes to the geth statediff service", func() { - client := &mocks.StreamClient{} - streamer := eth.NewPayloadStreamer(client) - payloadChan := make(chan shared.RawChainData) - _, err := streamer.Stream(payloadChan) - Expect(err).NotTo(HaveOccurred()) - }) -}) diff --git a/pkg/eth/types.go b/pkg/eth/types.go deleted file mode 100644 index 5be0d697..00000000 --- a/pkg/eth/types.go +++ /dev/null @@ -1,112 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
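Callers drove the removed PayloadStreamer by handing it a go-ethereum rpc client pointed at a statediffing geth node; Stream then forwards statediff payloads onto the supplied channel. A sketch of that wiring as it looked before this patch, assuming a hypothetical local websocket endpoint:

    package main

    import (
        "log"

        "github.com/ethereum/go-ethereum/rpc"

        "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth"
        "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared"
    )

    func main() {
        // Hypothetical endpoint of a geth node running the statediff service.
        client, err := rpc.Dial("ws://127.0.0.1:8546")
        if err != nil {
            log.Fatal(err)
        }
        streamer := eth.NewPayloadStreamer(client)
        payloadChan := make(chan shared.RawChainData, eth.PayloadChanBufferSize)
        if _, err := streamer.Stream(payloadChan); err != nil {
            log.Fatal(err)
        }
        for payload := range payloadChan {
            _ = payload // hand off to the converter/publisher pipeline
        }
    }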
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package eth - -import ( - "math/big" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/statediff" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs" -) - -// ConvertedPayload is a custom type which packages raw ETH data for publishing to IPFS and filtering to subscribers -// Returned by PayloadConverter -// Passed to IPLDPublisher and ResponseFilterer -type ConvertedPayload struct { - TotalDifficulty *big.Int - Block *types.Block - TxMetaData []TxModel - Receipts types.Receipts - ReceiptMetaData []ReceiptModel - StateNodes []TrieNode - StorageNodes map[string][]TrieNode -} - -// Height satisfies the StreamedIPLDs interface -func (i ConvertedPayload) Height() int64 { - return i.Block.Number().Int64() -} - -// Trie struct used to flag node as leaf or not -type TrieNode struct { - Path []byte - LeafKey common.Hash - Value []byte - Type statediff.NodeType -} - -// CIDPayload is a struct to hold all the CIDs and their associated meta data for indexing in Postgres -// Returned by IPLDPublisher -// Passed to CIDIndexer -type CIDPayload struct { - HeaderCID HeaderModel - UncleCIDs []UncleModel - TransactionCIDs []TxModel - ReceiptCIDs map[common.Hash]ReceiptModel - StateNodeCIDs []StateNodeModel - StateAccounts map[string]StateAccountModel - StorageNodeCIDs map[string][]StorageNodeModel -} - -// CIDWrapper is used to direct fetching of IPLDs from IPFS -// Returned by CIDRetriever -// Passed to IPLDFetcher -type CIDWrapper struct { - BlockNumber *big.Int - Header HeaderModel - Uncles []UncleModel - Transactions []TxModel - Receipts []ReceiptModel - StateNodes []StateNodeModel - StorageNodes []StorageNodeWithStateKeyModel -} - -// IPLDs is used to package raw IPLD block data fetched from IPFS and returned by the server -// Returned by IPLDFetcher and ResponseFilterer -type IPLDs struct { - BlockNumber *big.Int - TotalDifficulty *big.Int - Header ipfs.BlockModel - Uncles []ipfs.BlockModel - Transactions []ipfs.BlockModel - Receipts []ipfs.BlockModel - StateNodes []StateNode - StorageNodes []StorageNode -} - -// Height satisfies the StreamedIPLDs interface -func (i IPLDs) Height() int64 { - return i.BlockNumber.Int64() -} - -type StateNode struct { - Type statediff.NodeType - StateLeafKey common.Hash - Path []byte - IPLD ipfs.BlockModel -} - -type StorageNode struct { - Type statediff.NodeType - StateLeafKey common.Hash - StorageLeafKey common.Hash - Path []byte - IPLD ipfs.BlockModel -} diff --git a/pkg/historical/config.go b/pkg/historical/config.go deleted file mode 100644 index c435f187..00000000 --- a/pkg/historical/config.go +++ /dev/null @@ -1,135 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
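Both ConvertedPayload and IPLDs expose Height so downstream components can treat any streamed unit uniformly when ordering and validating data. A tiny sketch of how that looked before this patch, using a hypothetical empty block at height 100:

    package main

    import (
        "fmt"
        "math/big"

        "github.com/ethereum/go-ethereum/core/types"

        "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth"
    )

    func main() {
        header := &types.Header{Number: big.NewInt(100)}
        payload := eth.ConvertedPayload{
            TotalDifficulty: big.NewInt(1),
            Block:           types.NewBlockWithHeader(header),
        }
        fmt.Println(payload.Height()) // 100
    }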
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package historical - -import ( - "fmt" - "time" - - "github.com/spf13/viper" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/config" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/node" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" - "github.com/vulcanize/ipfs-blockchain-watcher/utils" -) - -// Env variables -const ( - SUPERNODE_CHAIN = "SUPERNODE_CHAIN" - SUPERNODE_FREQUENCY = "SUPERNODE_FREQUENCY" - SUPERNODE_BATCH_SIZE = "SUPERNODE_BATCH_SIZE" - SUPERNODE_BATCH_NUMBER = "SUPERNODE_BATCH_NUMBER" - SUPERNODE_VALIDATION_LEVEL = "SUPERNODE_VALIDATION_LEVEL" - - BACKFILL_MAX_IDLE_CONNECTIONS = "BACKFILL_MAX_IDLE_CONNECTIONS" - BACKFILL_MAX_OPEN_CONNECTIONS = "BACKFILL_MAX_OPEN_CONNECTIONS" - BACKFILL_MAX_CONN_LIFETIME = "BACKFILL_MAX_CONN_LIFETIME" -) - -// Config struct -type Config struct { - Chain shared.ChainType - DBConfig config.Database - - DB *postgres.DB - HTTPClient interface{} - Frequency time.Duration - BatchSize uint64 - BatchNumber uint64 - ValidationLevel int - Timeout time.Duration // HTTP connection timeout in seconds - NodeInfo node.Node -} - -// NewConfig is used to initialize a historical config from a .toml file -func NewConfig() (*Config, error) { - c := new(Config) - var err error - - viper.BindEnv("watcher.chain", SUPERNODE_CHAIN) - chain := viper.GetString("watcher.chain") - c.Chain, err = shared.NewChainType(chain) - if err != nil { - return nil, err - } - - c.DBConfig.Init() - if err := c.init(); err != nil { - return nil, err - } - - return c, nil -} - -func (c *Config) init() error { - var err error - - viper.BindEnv("ethereum.httpPath", shared.ETH_HTTP_PATH) - viper.BindEnv("bitcoin.httpPath", shared.BTC_HTTP_PATH) - viper.BindEnv("watcher.frequency", SUPERNODE_FREQUENCY) - viper.BindEnv("watcher.batchSize", SUPERNODE_BATCH_SIZE) - viper.BindEnv("watcher.batchNumber", SUPERNODE_BATCH_NUMBER) - viper.BindEnv("watcher.validationLevel", SUPERNODE_VALIDATION_LEVEL) - viper.BindEnv("watcher.timeout", shared.HTTP_TIMEOUT) - - timeout := viper.GetInt("watcher.timeout") - if timeout < 15 { - timeout = 15 - } - c.Timeout = time.Second * time.Duration(timeout) - - switch c.Chain { - case shared.Ethereum: - ethHTTP := viper.GetString("ethereum.httpPath") - c.NodeInfo, c.HTTPClient, err = shared.GetEthNodeAndClient(fmt.Sprintf("http://%s", ethHTTP)) - if err != nil { - return err - } - case shared.Bitcoin: - btcHTTP := viper.GetString("bitcoin.httpPath") - c.NodeInfo, c.HTTPClient = shared.GetBtcNodeAndClient(btcHTTP) - } - - freq := viper.GetInt("watcher.frequency") - var frequency time.Duration - if freq <= 0 { - frequency = time.Second * 30 - } else { - frequency = time.Second * time.Duration(freq) - } - c.Frequency = frequency - c.BatchSize = uint64(viper.GetInt64("watcher.batchSize")) - c.BatchNumber = uint64(viper.GetInt64("watcher.batchNumber")) - c.ValidationLevel = viper.GetInt("watcher.validationLevel") - - dbConn := overrideDBConnConfig(c.DBConfig) - db := utils.LoadPostgres(dbConn, c.NodeInfo) - c.DB = &db - return nil -} - -func overrideDBConnConfig(con config.Database) config.Database { - 
viper.BindEnv("database.backFill.maxIdle", BACKFILL_MAX_IDLE_CONNECTIONS) - viper.BindEnv("database.backFill.maxOpen", BACKFILL_MAX_OPEN_CONNECTIONS) - viper.BindEnv("database.backFill.maxLifetime", BACKFILL_MAX_CONN_LIFETIME) - con.MaxIdle = viper.GetInt("database.backFill.maxIdle") - con.MaxOpen = viper.GetInt("database.backFill.maxOpen") - con.MaxLifetime = viper.GetInt("database.backFill.maxLifetime") - return con -} diff --git a/pkg/historical/historical_suite_test.go b/pkg/historical/historical_suite_test.go deleted file mode 100644 index 835abd05..00000000 --- a/pkg/historical/historical_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package historical_test - -import ( - "io/ioutil" - "testing" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/sirupsen/logrus" -) - -func TestIPFSWatcher(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "IPFS Watcher Historical Suite Test") -} - -var _ = BeforeSuite(func() { - logrus.SetOutput(ioutil.Discard) -}) diff --git a/pkg/historical/service.go b/pkg/historical/service.go deleted file mode 100644 index 95173435..00000000 --- a/pkg/historical/service.go +++ /dev/null @@ -1,196 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
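Every knob in the removed historical config resolves through viper, so each TOML key can be overridden by its environment variable; the gap-check frequency falls back to 30 seconds whenever the key is unset or non-positive. A reduced sketch of just that handling:

    package main

    import (
        "fmt"
        "time"

        "github.com/spf13/viper"
    )

    func main() {
        // SUPERNODE_FREQUENCY overrides the watcher.frequency TOML key.
        viper.BindEnv("watcher.frequency", "SUPERNODE_FREQUENCY")
        freq := viper.GetInt("watcher.frequency")
        frequency := 30 * time.Second // default when unset or <= 0
        if freq > 0 {
            frequency = time.Duration(freq) * time.Second
        }
        fmt.Println("gap check frequency:", frequency)
    }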
- -package historical - -import ( - "sync" - "time" - - log "github.com/sirupsen/logrus" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/builders" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" - "github.com/vulcanize/ipfs-blockchain-watcher/utils" -) - -// BackFillInterface for filling in gaps in the ipfs-blockchain-watcher db -type BackFillInterface interface { - // Method for the watcher to periodically check for and fill in gaps in its data using an archival node - BackFill(wg *sync.WaitGroup) - Stop() error -} - -// BackFillService for filling in gaps in the watcher -type BackFillService struct { - // Interface for converting payloads into IPLD object payloads - Converter shared.PayloadConverter - // Interface for publishing the IPLD payloads to IPFS - Publisher shared.IPLDPublisher - // Interface for searching and retrieving CIDs from Postgres index - Retriever shared.CIDRetriever - // Interface for fetching payloads over at historical blocks; over http - Fetcher shared.PayloadFetcher - // Channel for forwarding backfill payloads to the ScreenAndServe process - ScreenAndServeChan chan shared.ConvertedData - // Check frequency - GapCheckFrequency time.Duration - // Size of batch fetches - BatchSize uint64 - // Number of goroutines - BatchNumber int64 - // Channel for receiving quit signal - QuitChan chan bool - // Chain type - chain shared.ChainType - // Headers with times_validated lower than this will be resynced - validationLevel int -} - -// NewBackFillService returns a new BackFillInterface -func NewBackFillService(settings *Config, screenAndServeChan chan shared.ConvertedData) (BackFillInterface, error) { - publisher, err := builders.NewIPLDPublisher(settings.Chain, settings.DB) - if err != nil { - return nil, err - } - converter, err := builders.NewPayloadConverter(settings.Chain, settings.NodeInfo.ChainID) - if err != nil { - return nil, err - } - retriever, err := builders.NewCIDRetriever(settings.Chain, settings.DB) - if err != nil { - return nil, err - } - fetcher, err := builders.NewPaylaodFetcher(settings.Chain, settings.HTTPClient, settings.Timeout) - if err != nil { - return nil, err - } - batchSize := settings.BatchSize - if batchSize == 0 { - batchSize = shared.DefaultMaxBatchSize - } - batchNumber := int64(settings.BatchNumber) - if batchNumber == 0 { - batchNumber = shared.DefaultMaxBatchNumber - } - return &BackFillService{ - Converter: converter, - Publisher: publisher, - Retriever: retriever, - Fetcher: fetcher, - GapCheckFrequency: settings.Frequency, - BatchSize: batchSize, - BatchNumber: int64(batchNumber), - ScreenAndServeChan: screenAndServeChan, - QuitChan: make(chan bool), - chain: settings.Chain, - validationLevel: settings.ValidationLevel, - }, nil -} - -// BackFill periodically checks for and fills in gaps in the watcher db -func (bfs *BackFillService) BackFill(wg *sync.WaitGroup) { - ticker := time.NewTicker(bfs.GapCheckFrequency) - go func() { - wg.Add(1) - defer wg.Done() - for { - select { - case <-bfs.QuitChan: - log.Infof("quiting %s BackFill process", bfs.chain.String()) - return - case <-ticker.C: - gaps, err := bfs.Retriever.RetrieveGapsInData(bfs.validationLevel) - if err != nil { - log.Errorf("%s watcher db backFill RetrieveGapsInData error: %v", bfs.chain.String(), err) - continue - } - // spin up worker goroutines for this search pass - // we start and kill a new batch of workers for each pass - // so that we know each of the previous workers is done before we search for new gaps - heightsChan := make(chan []uint64) - for 
i := 1; i <= int(bfs.BatchNumber); i++ { - go bfs.backFill(wg, i, heightsChan) - } - for _, gap := range gaps { - log.Infof("backFilling %s data from %d to %d", bfs.chain.String(), gap.Start, gap.Stop) - blockRangeBins, err := utils.GetBlockHeightBins(gap.Start, gap.Stop, bfs.BatchSize) - if err != nil { - log.Errorf("%s watcher db backFill GetBlockHeightBins error: %v", bfs.chain.String(), err) - continue - } - for _, heights := range blockRangeBins { - select { - case <-bfs.QuitChan: - log.Infof("quiting %s BackFill process", bfs.chain.String()) - return - default: - heightsChan <- heights - } - } - } - // send a quit signal to each worker - // this blocks until each worker has finished its current task and is free to receive from the quit channel - for i := 1; i <= int(bfs.BatchNumber); i++ { - bfs.QuitChan <- true - } - } - } - }() - log.Infof("%s BackFill goroutine successfully spun up", bfs.chain.String()) -} - -func (bfs *BackFillService) backFill(wg *sync.WaitGroup, id int, heightChan chan []uint64) { - wg.Add(1) - defer wg.Done() - for { - select { - case heights := <-heightChan: - log.Debugf("%s backFill worker %d processing section from %d to %d", bfs.chain.String(), id, heights[0], heights[len(heights)-1]) - payloads, err := bfs.Fetcher.FetchAt(heights) - if err != nil { - log.Errorf("%s backFill worker %d fetcher error: %s", bfs.chain.String(), id, err.Error()) - } - for _, payload := range payloads { - ipldPayload, err := bfs.Converter.Convert(payload) - if err != nil { - log.Errorf("%s backFill worker %d converter error: %s", bfs.chain.String(), id, err.Error()) - } - // If there is a ScreenAndServe process listening, forward converted payload to it - select { - case bfs.ScreenAndServeChan <- ipldPayload: - log.Debugf("%s backFill worker %d forwarded converted payload to server", bfs.chain.String(), id) - default: - log.Debugf("%s backFill worker %d unable to forward converted payload to server; no channel ready to receive", bfs.chain.String(), id) - } - if err := bfs.Publisher.Publish(ipldPayload); err != nil { - log.Errorf("%s backFill worker %d publisher error: %s", bfs.chain.String(), id, err.Error()) - continue - } - } - log.Infof("%s backFill worker %d finished section from %d to %d", bfs.chain.String(), id, heights[0], heights[len(heights)-1]) - case <-bfs.QuitChan: - log.Infof("%s backFill worker %d shutting down", bfs.chain.String(), id) - return - } - } -} - -func (bfs *BackFillService) Stop() error { - log.Infof("Stopping %s backFill service", bfs.chain.String()) - close(bfs.QuitChan) - return nil -} diff --git a/pkg/historical/service_test.go b/pkg/historical/service_test.go deleted file mode 100644 index 51cf3e23..00000000 --- a/pkg/historical/service_test.go +++ /dev/null @@ -1,180 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package historical_test - -import ( - "sync" - "time" - - . "github.com/onsi/ginkgo" - . 
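The BackFill loop just removed fans each gap out to BatchNumber worker goroutines over a heights channel and later unblocks every worker with a send on the quit channel. A stripped-down sketch of that fan-out pattern (the worker body and the toy batch are illustrative only):

    package main

    import (
        "fmt"
        "sync"
    )

    func worker(wg *sync.WaitGroup, id int, heights <-chan []uint64, quit <-chan bool) {
        defer wg.Done()
        for {
            select {
            case batch := <-heights:
                fmt.Printf("worker %d processing %v\n", id, batch)
            case <-quit:
                fmt.Printf("worker %d shutting down\n", id)
                return
            }
        }
    }

    func main() {
        heights := make(chan []uint64)
        quit := make(chan bool)
        var wg sync.WaitGroup
        for i := 1; i <= 2; i++ {
            wg.Add(1)
            go worker(&wg, i, heights, quit)
        }
        heights <- []uint64{100, 101}
        // One quit send per worker, mirroring how BackFill winds each pass down.
        quit <- true
        quit <- true
        wg.Wait()
    }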
"github.com/onsi/gomega" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth/mocks" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/historical" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" - mocks2 "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared/mocks" -) - -var _ = Describe("BackFiller", func() { - Describe("FillGaps", func() { - It("Periodically checks for and fills in gaps in the watcher's data", func() { - mockPublisher := &mocks.IterativeIPLDPublisher{ - ReturnCIDPayload: []*eth.CIDPayload{mocks.MockCIDPayload, mocks.MockCIDPayload}, - ReturnErr: nil, - } - mockConverter := &mocks.IterativePayloadConverter{ - ReturnIPLDPayload: []eth.ConvertedPayload{mocks.MockConvertedPayload, mocks.MockConvertedPayload}, - ReturnErr: nil, - } - mockRetriever := &mocks2.CIDRetriever{ - FirstBlockNumberToReturn: 0, - GapsToRetrieve: []shared.Gap{ - { - Start: 100, Stop: 101, - }, - }, - } - mockFetcher := &mocks2.PayloadFetcher{ - PayloadsToReturn: map[uint64]shared.RawChainData{ - 100: mocks.MockStateDiffPayload, - 101: mocks.MockStateDiffPayload, - }, - } - quitChan := make(chan bool, 1) - backfiller := &historical.BackFillService{ - Publisher: mockPublisher, - Converter: mockConverter, - Fetcher: mockFetcher, - Retriever: mockRetriever, - GapCheckFrequency: time.Second * 2, - BatchSize: shared.DefaultMaxBatchSize, - BatchNumber: shared.DefaultMaxBatchNumber, - QuitChan: quitChan, - } - wg := &sync.WaitGroup{} - backfiller.BackFill(wg) - time.Sleep(time.Second * 3) - quitChan <- true - Expect(len(mockPublisher.PassedIPLDPayload)).To(Equal(2)) - Expect(mockPublisher.PassedIPLDPayload[0]).To(Equal(mocks.MockConvertedPayload)) - Expect(mockPublisher.PassedIPLDPayload[1]).To(Equal(mocks.MockConvertedPayload)) - Expect(len(mockConverter.PassedStatediffPayload)).To(Equal(2)) - Expect(mockConverter.PassedStatediffPayload[0]).To(Equal(mocks.MockStateDiffPayload)) - Expect(mockConverter.PassedStatediffPayload[1]).To(Equal(mocks.MockStateDiffPayload)) - Expect(mockRetriever.CalledTimes).To(Equal(1)) - Expect(len(mockFetcher.CalledAtBlockHeights)).To(Equal(1)) - Expect(mockFetcher.CalledAtBlockHeights[0]).To(Equal([]uint64{100, 101})) - }) - - It("Works for single block `ranges`", func() { - mockPublisher := &mocks.IterativeIPLDPublisher{ - ReturnCIDPayload: []*eth.CIDPayload{mocks.MockCIDPayload}, - ReturnErr: nil, - } - mockConverter := &mocks.IterativePayloadConverter{ - ReturnIPLDPayload: []eth.ConvertedPayload{mocks.MockConvertedPayload}, - ReturnErr: nil, - } - mockRetriever := &mocks2.CIDRetriever{ - FirstBlockNumberToReturn: 0, - GapsToRetrieve: []shared.Gap{ - { - Start: 100, Stop: 100, - }, - }, - } - mockFetcher := &mocks2.PayloadFetcher{ - PayloadsToReturn: map[uint64]shared.RawChainData{ - 100: mocks.MockStateDiffPayload, - }, - } - quitChan := make(chan bool, 1) - backfiller := &historical.BackFillService{ - Publisher: mockPublisher, - Converter: mockConverter, - Fetcher: mockFetcher, - Retriever: mockRetriever, - GapCheckFrequency: time.Second * 2, - BatchSize: shared.DefaultMaxBatchSize, - BatchNumber: shared.DefaultMaxBatchNumber, - QuitChan: quitChan, - } - wg := &sync.WaitGroup{} - backfiller.BackFill(wg) - time.Sleep(time.Second * 3) - quitChan <- true - Expect(len(mockPublisher.PassedIPLDPayload)).To(Equal(1)) - Expect(mockPublisher.PassedIPLDPayload[0]).To(Equal(mocks.MockConvertedPayload)) - Expect(len(mockConverter.PassedStatediffPayload)).To(Equal(1)) - 
Expect(mockConverter.PassedStatediffPayload[0]).To(Equal(mocks.MockStateDiffPayload)) - Expect(mockRetriever.CalledTimes).To(Equal(1)) - Expect(len(mockFetcher.CalledAtBlockHeights)).To(Equal(1)) - Expect(mockFetcher.CalledAtBlockHeights[0]).To(Equal([]uint64{100})) - }) - - It("Finds beginning gap", func() { - mockPublisher := &mocks.IterativeIPLDPublisher{ - ReturnCIDPayload: []*eth.CIDPayload{mocks.MockCIDPayload, mocks.MockCIDPayload}, - ReturnErr: nil, - } - mockConverter := &mocks.IterativePayloadConverter{ - ReturnIPLDPayload: []eth.ConvertedPayload{mocks.MockConvertedPayload, mocks.MockConvertedPayload}, - ReturnErr: nil, - } - mockRetriever := &mocks2.CIDRetriever{ - FirstBlockNumberToReturn: 3, - GapsToRetrieve: []shared.Gap{ - { - Start: 0, - Stop: 2, - }, - }, - } - mockFetcher := &mocks2.PayloadFetcher{ - PayloadsToReturn: map[uint64]shared.RawChainData{ - 1: mocks.MockStateDiffPayload, - 2: mocks.MockStateDiffPayload, - }, - } - quitChan := make(chan bool, 1) - backfiller := &historical.BackFillService{ - Publisher: mockPublisher, - Converter: mockConverter, - Fetcher: mockFetcher, - Retriever: mockRetriever, - GapCheckFrequency: time.Second * 2, - BatchSize: shared.DefaultMaxBatchSize, - BatchNumber: shared.DefaultMaxBatchNumber, - QuitChan: quitChan, - } - wg := &sync.WaitGroup{} - backfiller.BackFill(wg) - time.Sleep(time.Second * 3) - quitChan <- true - Expect(len(mockPublisher.PassedIPLDPayload)).To(Equal(2)) - Expect(mockPublisher.PassedIPLDPayload[0]).To(Equal(mocks.MockConvertedPayload)) - Expect(mockPublisher.PassedIPLDPayload[1]).To(Equal(mocks.MockConvertedPayload)) - Expect(len(mockConverter.PassedStatediffPayload)).To(Equal(2)) - Expect(mockConverter.PassedStatediffPayload[0]).To(Equal(mocks.MockStateDiffPayload)) - Expect(mockConverter.PassedStatediffPayload[1]).To(Equal(mocks.MockStateDiffPayload)) - Expect(mockRetriever.CalledTimes).To(Equal(1)) - Expect(len(mockFetcher.CalledAtBlockHeights)).To(Equal(1)) - Expect(mockFetcher.CalledAtBlockHeights[0]).To(Equal([]uint64{0, 1, 2})) - }) - }) -}) diff --git a/pkg/ipfs/ipld/btc_header.go b/pkg/ipfs/ipld/btc_header.go deleted file mode 100644 index 5d171de0..00000000 --- a/pkg/ipfs/ipld/btc_header.go +++ /dev/null @@ -1,183 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package ipld - -import ( - "bytes" - "encoding/hex" - "fmt" - - "github.com/btcsuite/btcd/wire" - "github.com/ipfs/go-cid" - node "github.com/ipfs/go-ipld-format" - mh "github.com/multiformats/go-multihash" -) - -type BtcHeader struct { - *wire.BlockHeader - - rawdata []byte - cid cid.Cid -} - -// Static (compile time) check that BtcBtcHeader satisfies the node.Node interface. 
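The heights the mock fetcher records in these specs come from splitting each gap into inclusive batches before handing them to the workers; with the default batch size a gap such as 0-2 arrives as the single slice [0 1 2]. A self-contained sketch of that binning step (binHeights is illustrative; the real code used utils.GetBlockHeightBins):

    package main

    import "fmt"

    // binHeights splits the inclusive range [start, stop] into batches of at most
    // batchSize heights (batchSize must be > 0).
    func binHeights(start, stop, batchSize uint64) [][]uint64 {
        var bins [][]uint64
        for start <= stop {
            bin := make([]uint64, 0, batchSize)
            for i := uint64(0); i < batchSize && start <= stop; i++ {
                bin = append(bin, start)
                start++
            }
            bins = append(bins, bin)
        }
        return bins
    }

    func main() {
        fmt.Println(binHeights(0, 2, 100))   // [[0 1 2]]
        fmt.Println(binHeights(100, 105, 2)) // [[100 101] [102 103] [104 105]]
    }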
-var _ node.Node = (*BtcHeader)(nil) - -/* - INPUT -*/ - -// NewBtcHeader converts a *wire.Header into an BtcHeader IPLD node -func NewBtcHeader(header *wire.BlockHeader) (*BtcHeader, error) { - w := bytes.NewBuffer(make([]byte, 0, 80)) - if err := header.Serialize(w); err != nil { - return nil, err - } - rawdata := w.Bytes() - c, err := RawdataToCid(MBitcoinHeader, rawdata, mh.DBL_SHA2_256) - if err != nil { - return nil, err - } - return &BtcHeader{ - BlockHeader: header, - cid: c, - rawdata: rawdata, - }, nil -} - -/* - Block INTERFACE -*/ - -func (b *BtcHeader) Cid() cid.Cid { - return b.cid -} - -func (b *BtcHeader) RawData() []byte { - return b.rawdata -} - -func (b *BtcHeader) String() string { - return fmt.Sprintf("", b.cid) -} - -func (b *BtcHeader) Loggable() map[string]interface{} { - // TODO: more helpful info here - return map[string]interface{}{ - "type": "bitcoin_block", - } -} - -/* - Node INTERFACE -*/ - -func (b *BtcHeader) Links() []*node.Link { - return []*node.Link{ - { - Name: "tx", - Cid: sha256ToCid(MBitcoinTx, b.MerkleRoot.CloneBytes()), - }, - { - Name: "parent", - Cid: sha256ToCid(MBitcoinHeader, b.PrevBlock.CloneBytes()), - }, - } -} - -// Resolve attempts to traverse a path through this block. -func (b *BtcHeader) Resolve(path []string) (interface{}, []string, error) { - if len(path) == 0 { - return nil, nil, fmt.Errorf("zero length path") - } - switch path[0] { - case "version": - return b.Version, path[1:], nil - case "timestamp": - return b.Timestamp, path[1:], nil - case "bits": - return b.Bits, path[1:], nil - case "nonce": - return b.Nonce, path[1:], nil - case "parent": - return &node.Link{Cid: sha256ToCid(MBitcoinHeader, b.PrevBlock.CloneBytes())}, path[1:], nil - case "tx": - return &node.Link{Cid: sha256ToCid(MBitcoinTx, b.MerkleRoot.CloneBytes())}, path[1:], nil - default: - return nil, nil, fmt.Errorf("no such link") - } -} - -// ResolveLink is a helper function that allows easier traversal of links through blocks -func (b *BtcHeader) ResolveLink(path []string) (*node.Link, []string, error) { - out, rest, err := b.Resolve(path) - if err != nil { - return nil, nil, err - } - - lnk, ok := out.(*node.Link) - if !ok { - return nil, nil, fmt.Errorf("object at path was not a link") - } - - return lnk, rest, nil -} - -func cidToHash(c cid.Cid) []byte { - h := []byte(c.Hash()) - return h[len(h)-32:] -} - -func hashToCid(hv []byte, t uint64) cid.Cid { - h, _ := mh.Encode(hv, mh.DBL_SHA2_256) - return cid.NewCidV1(t, h) -} - -func (b *BtcHeader) Size() (uint64, error) { - return uint64(len(b.rawdata)), nil -} - -func (b *BtcHeader) Stat() (*node.NodeStat, error) { - return &node.NodeStat{}, nil -} - -func (b *BtcHeader) Tree(p string, depth int) []string { - // TODO: this isnt a correct implementation yet - return []string{"difficulty", "nonce", "version", "timestamp", "tx", "parent"} -} - -func (b *BtcHeader) BTCSha() []byte { - blkmh, _ := mh.Sum(b.rawdata, mh.DBL_SHA2_256, -1) - return blkmh[2:] -} - -func (b *BtcHeader) HexHash() string { - return hex.EncodeToString(revString(b.BTCSha())) -} - -func (b *BtcHeader) Copy() node.Node { - nb := *b // cheating shallow copy - return &nb -} - -func revString(s []byte) []byte { - b := make([]byte, len(s)) - for i, v := range []byte(s) { - b[len(b)-(i+1)] = v - } - return b -} diff --git a/pkg/ipfs/ipld/btc_parser.go b/pkg/ipfs/ipld/btc_parser.go deleted file mode 100644 index a554b667..00000000 --- a/pkg/ipfs/ipld/btc_parser.go +++ /dev/null @@ -1,74 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// 
This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package ipld - -import ( - "github.com/btcsuite/btcd/wire" - "github.com/btcsuite/btcutil" - node "github.com/ipfs/go-ipld-format" -) - -// FromHeaderAndTxs takes a block header and txs and processes it -// to return it a set of IPLD nodes for further processing. -func FromHeaderAndTxs(header *wire.BlockHeader, txs []*btcutil.Tx) (*BtcHeader, []*BtcTx, []*BtcTxTrie, error) { - var txNodes []*BtcTx - for _, tx := range txs { - txNode, err := NewBtcTx(tx.MsgTx()) - if err != nil { - return nil, nil, nil, err - } - txNodes = append(txNodes, txNode) - } - txTrie, err := mkMerkleTree(txNodes) - if err != nil { - return nil, nil, nil, err - } - headerNode, err := NewBtcHeader(header) - return headerNode, txNodes, txTrie, err -} - -func mkMerkleTree(txs []*BtcTx) ([]*BtcTxTrie, error) { - layer := make([]node.Node, len(txs)) - for i, tx := range txs { - layer[i] = tx - } - var out []*BtcTxTrie - var next []node.Node - for len(layer) > 1 { - if len(layer)%2 != 0 { - layer = append(layer, layer[len(layer)-1]) - } - for i := 0; i < len(layer)/2; i++ { - var left, right node.Node - left = layer[i*2] - right = layer[(i*2)+1] - - t := &BtcTxTrie{ - Left: &node.Link{Cid: left.Cid()}, - Right: &node.Link{Cid: right.Cid()}, - } - - out = append(out, t) - next = append(next, t) - } - - layer = next - next = nil - } - - return out, nil -} diff --git a/pkg/ipfs/ipld/btc_tx.go b/pkg/ipfs/ipld/btc_tx.go deleted file mode 100644 index f37332d3..00000000 --- a/pkg/ipfs/ipld/btc_tx.go +++ /dev/null @@ -1,258 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package ipld - -import ( - "bytes" - "encoding/hex" - "fmt" - "strconv" - - "github.com/btcsuite/btcd/wire" - "github.com/ipfs/go-cid" - node "github.com/ipfs/go-ipld-format" - mh "github.com/multiformats/go-multihash" -) - -type BtcTx struct { - *wire.MsgTx - - rawdata []byte - cid cid.Cid -} - -// Static (compile time) check that BtcBtcHeader satisfies the node.Node interface. 
-var _ node.Node = (*BtcTx)(nil) - -/* - INPUT -*/ - -// NewBtcTx converts a *wire.MsgTx into an BtcTx IPLD node -func NewBtcTx(tx *wire.MsgTx) (*BtcTx, error) { - w := bytes.NewBuffer(make([]byte, 0, tx.SerializeSize())) - if err := tx.Serialize(w); err != nil { - return nil, err - } - rawdata := w.Bytes() - c, err := RawdataToCid(MBitcoinTx, rawdata, mh.DBL_SHA2_256) - if err != nil { - return nil, err - } - return &BtcTx{ - MsgTx: tx, - cid: c, - rawdata: rawdata, - }, nil -} - -/* - Block INTERFACE -*/ - -func (t *BtcTx) Cid() cid.Cid { - return t.cid -} - -func (t *BtcTx) RawData() []byte { - return t.rawdata -} - -func (t *BtcTx) String() string { - return fmt.Sprintf("", t.cid) -} - -func (t *BtcTx) Loggable() map[string]interface{} { - return map[string]interface{}{ - "type": "bitcoinTx", - } -} - -/* - Node INTERFACE -*/ - -func (t *BtcTx) Links() []*node.Link { - var out []*node.Link - for i, in := range t.MsgTx.TxIn { - lnk := &node.Link{Cid: sha256ToCid(MBitcoinTx, in.PreviousOutPoint.Hash.CloneBytes())} - lnk.Name = fmt.Sprintf("inputs/%d/prevTx", i) - out = append(out, lnk) - } - return out -} - -func (t *BtcTx) Resolve(path []string) (interface{}, []string, error) { - switch path[0] { - case "version": - return t.Version, path[1:], nil - case "lockTime": - return t.LockTime, path[1:], nil - case "inputs": - if len(path) == 1 { - return t.MsgTx.TxIn, nil, nil - } - - index, err := strconv.Atoi(path[1]) - if err != nil { - return nil, nil, err - } - - if index >= len(t.MsgTx.TxIn) || index < 0 { - return nil, nil, fmt.Errorf("index out of range") - } - - inp := t.MsgTx.TxIn[index] - if len(path) == 2 { - return inp, nil, nil - } - - switch path[2] { - case "prevTx": - return &node.Link{Cid: sha256ToCid(MBitcoinTx, inp.PreviousOutPoint.Hash.CloneBytes())}, path[3:], nil - case "seqNo": - return inp.Sequence, path[3:], nil - case "script": - return inp.SignatureScript, path[3:], nil - default: - return nil, nil, fmt.Errorf("no such link") - } - case "outputs": - if len(path) == 1 { - return t.TxOut, nil, nil - } - - index, err := strconv.Atoi(path[1]) - if err != nil { - return nil, nil, err - } - - if index >= len(t.TxOut) || index < 0 { - return nil, nil, fmt.Errorf("index out of range") - } - - outp := t.TxOut[index] - if len(path) == 2 { - return outp, path[2:], nil - } - - switch path[2] { - case "value": - return outp.Value, path[3:], nil - case "script": - /* - if outp.Script[0] == 0x6a { // OP_RETURN - c, err := cid.Decode(string(outp.Script[1:])) - if err == nil { - return &node.Link{Cid: c}, path[3:], nil - } - } - */ - return outp.PkScript, path[3:], nil - default: - return nil, nil, fmt.Errorf("no such link") - } - default: - return nil, nil, fmt.Errorf("no such link") - } -} - -func (t *BtcTx) ResolveLink(path []string) (*node.Link, []string, error) { - i, rest, err := t.Resolve(path) - if err != nil { - return nil, rest, err - } - - lnk, ok := i.(*node.Link) - if !ok { - return nil, nil, fmt.Errorf("value was not a link") - } - - return lnk, rest, nil -} - -func (t *BtcTx) Size() (uint64, error) { - return uint64(len(t.RawData())), nil -} - -func (t *BtcTx) Stat() (*node.NodeStat, error) { - return &node.NodeStat{}, nil -} - -func (t *BtcTx) Copy() node.Node { - nt := *t // cheating shallow copy - return &nt -} - -func (t *BtcTx) Tree(p string, depth int) []string { - if depth == 0 { - return nil - } - - switch p { - case "inputs": - return t.treeInputs(nil, depth+1) - case "outputs": - return t.treeOutputs(nil, depth+1) - case "": - out := []string{"version", 
"timeLock", "inputs", "outputs"} - out = t.treeInputs(out, depth) - out = t.treeOutputs(out, depth) - return out - default: - return nil - } -} - -func (t *BtcTx) treeInputs(out []string, depth int) []string { - if depth < 2 { - return out - } - - for i := range t.TxIn { - inp := "inputs/" + fmt.Sprint(i) - out = append(out, inp) - if depth > 2 { - out = append(out, inp+"/prevTx", inp+"/seqNo", inp+"/script") - } - } - return out -} - -func (t *BtcTx) treeOutputs(out []string, depth int) []string { - if depth < 2 { - return out - } - - for i := range t.TxOut { - o := "outputs/" + fmt.Sprint(i) - out = append(out, o) - if depth > 2 { - out = append(out, o+"/script", o+"/value") - } - } - return out -} - -func (t *BtcTx) BTCSha() []byte { - mh, _ := mh.Sum(t.RawData(), mh.DBL_SHA2_256, -1) - return []byte(mh[2:]) -} - -func (t *BtcTx) HexHash() string { - return hex.EncodeToString(revString(t.BTCSha())) -} diff --git a/pkg/ipfs/ipld/btc_tx_trie.go b/pkg/ipfs/ipld/btc_tx_trie.go deleted file mode 100644 index b88194a8..00000000 --- a/pkg/ipfs/ipld/btc_tx_trie.go +++ /dev/null @@ -1,110 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package ipld - -import ( - "fmt" - - "github.com/ipfs/go-cid" - node "github.com/ipfs/go-ipld-format" - mh "github.com/multiformats/go-multihash" -) - -type BtcTxTrie struct { - Left *node.Link - Right *node.Link -} - -func (t *BtcTxTrie) BTCSha() []byte { - return cidToHash(t.Cid()) -} - -func (t *BtcTxTrie) Cid() cid.Cid { - h, _ := mh.Sum(t.RawData(), mh.DBL_SHA2_256, -1) - return cid.NewCidV1(cid.BitcoinTx, h) -} - -func (t *BtcTxTrie) Links() []*node.Link { - return []*node.Link{t.Left, t.Right} -} - -func (t *BtcTxTrie) RawData() []byte { - out := make([]byte, 64) - lbytes := cidToHash(t.Left.Cid) - copy(out[:32], lbytes) - - rbytes := cidToHash(t.Right.Cid) - copy(out[32:], rbytes) - - return out -} - -func (t *BtcTxTrie) Loggable() map[string]interface{} { - return map[string]interface{}{ - "type": "bitcoin_tx_tree", - } -} - -func (t *BtcTxTrie) Resolve(path []string) (interface{}, []string, error) { - if len(path) == 0 { - return nil, nil, fmt.Errorf("zero length path") - } - - switch path[0] { - case "0": - return t.Left, path[1:], nil - case "1": - return t.Right, path[1:], nil - default: - return nil, nil, fmt.Errorf("no such link") - } -} - -func (t *BtcTxTrie) Copy() node.Node { - nt := *t - return &nt -} - -func (t *BtcTxTrie) ResolveLink(path []string) (*node.Link, []string, error) { - out, rest, err := t.Resolve(path) - if err != nil { - return nil, nil, err - } - - lnk, ok := out.(*node.Link) - if ok { - return lnk, rest, nil - } - - return nil, nil, fmt.Errorf("path did not lead to link") -} - -func (t *BtcTxTrie) Size() (uint64, error) { - return uint64(len(t.RawData())), nil -} - -func (t *BtcTxTrie) Stat() (*node.NodeStat, error) { - return &node.NodeStat{}, nil -} - -func (t *BtcTxTrie) String() string { - return fmt.Sprintf("[bitcoin transaction tree]") -} - -func (t *BtcTxTrie) Tree(p string, depth int) []string { - return []string{"0", "1"} -} diff --git a/pkg/ipfs/ipld/eth_account.go b/pkg/ipfs/ipld/eth_account.go deleted file mode 100644 index 5d80af1d..00000000 --- a/pkg/ipfs/ipld/eth_account.go +++ /dev/null @@ -1,175 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package ipld - -import ( - "encoding/json" - "fmt" - "math/big" - - "github.com/ipfs/go-cid" - node "github.com/ipfs/go-ipld-format" -) - -// EthAccountSnapshot (eth-account-snapshot codec 0x97) -// represents an ethereum account, i.e. a wallet address or -// a smart contract -type EthAccountSnapshot struct { - *EthAccount - - cid cid.Cid - rawdata []byte -} - -// EthAccount is the building block of EthAccountSnapshot. -// Or, is the former stripped of its cid and rawdata components. -type EthAccount struct { - Nonce uint64 - Balance *big.Int - Root []byte // This is the storage root trie - CodeHash []byte // This is the hash of the EVM code -} - -// Static (compile time) check that EthAccountSnapshot satisfies the -// node.Node interface. 
-var _ node.Node = (*EthAccountSnapshot)(nil) - -/* - INPUT -*/ - -// Input should be managed by EthStateTrie - -/* - OUTPUT -*/ - -// Output should be managed by EthStateTrie - -/* - Block INTERFACE -*/ - -// RawData returns the binary of the RLP encode of the account snapshot. -func (as *EthAccountSnapshot) RawData() []byte { - return as.rawdata -} - -// Cid returns the cid of the transaction. -func (as *EthAccountSnapshot) Cid() cid.Cid { - return as.cid -} - -// String is a helper for output -func (as *EthAccountSnapshot) String() string { - return fmt.Sprintf("", as.cid) -} - -// Loggable returns in a map the type of IPLD Link. -func (as *EthAccountSnapshot) Loggable() map[string]interface{} { - return map[string]interface{}{ - "type": "eth-account-snapshot", - } -} - -/* - Node INTERFACE -*/ - -// Resolve resolves a path through this node, stopping at any link boundary -// and returning the object found as well as the remaining path to traverse -func (as *EthAccountSnapshot) Resolve(p []string) (interface{}, []string, error) { - if len(p) == 0 { - return as, nil, nil - } - - if len(p) > 1 { - return nil, nil, fmt.Errorf("unexpected path elements past %s", p[0]) - } - - switch p[0] { - case "balance": - return as.Balance, nil, nil - case "codeHash": - return &node.Link{Cid: keccak256ToCid(RawBinary, as.CodeHash)}, nil, nil - case "nonce": - return as.Nonce, nil, nil - case "root": - return &node.Link{Cid: keccak256ToCid(MEthStorageTrie, as.Root)}, nil, nil - default: - return nil, nil, fmt.Errorf("no such link") - } -} - -// Tree lists all paths within the object under 'path', and up to the given depth. -// To list the entire object (similar to `find .`) pass "" and -1 -func (as *EthAccountSnapshot) Tree(p string, depth int) []string { - if p != "" || depth == 0 { - return nil - } - return []string{"balance", "codeHash", "nonce", "root"} -} - -// ResolveLink is a helper function that calls resolve and asserts the -// output is a link -func (as *EthAccountSnapshot) ResolveLink(p []string) (*node.Link, []string, error) { - obj, rest, err := as.Resolve(p) - if err != nil { - return nil, nil, err - } - - if lnk, ok := obj.(*node.Link); ok { - return lnk, rest, nil - } - - return nil, nil, fmt.Errorf("resolved item was not a link") -} - -// Copy will go away. It is here to comply with the interface. -func (as *EthAccountSnapshot) Copy() node.Node { - panic("dont use this yet") -} - -// Links is a helper function that returns all links within this object -func (as *EthAccountSnapshot) Links() []*node.Link { - return nil -} - -// Stat will go away. It is here to comply with the interface. -func (as *EthAccountSnapshot) Stat() (*node.NodeStat, error) { - return &node.NodeStat{}, nil -} - -// Size will go away. It is here to comply with the interface. -func (as *EthAccountSnapshot) Size() (uint64, error) { - return 0, nil -} - -/* - EthAccountSnapshot functions -*/ - -// MarshalJSON processes the transaction into readable JSON format. 
-func (as *EthAccountSnapshot) MarshalJSON() ([]byte, error) { - out := map[string]interface{}{ - "balance": as.Balance, - "codeHash": keccak256ToCid(RawBinary, as.CodeHash), - "nonce": as.Nonce, - "root": keccak256ToCid(MEthStorageTrie, as.Root), - } - return json.Marshal(out) -} diff --git a/pkg/ipfs/ipld/eth_header.go b/pkg/ipfs/ipld/eth_header.go deleted file mode 100644 index c33931d4..00000000 --- a/pkg/ipfs/ipld/eth_header.go +++ /dev/null @@ -1,256 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package ipld - -import ( - "encoding/json" - "fmt" - - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/rlp" - "github.com/ipfs/go-cid" - node "github.com/ipfs/go-ipld-format" - mh "github.com/multiformats/go-multihash" -) - -// EthHeader (eth-block, codec 0x90), represents an ethereum block header -type EthHeader struct { - *types.Header - - cid cid.Cid - rawdata []byte -} - -// Static (compile time) check that EthHeader satisfies the node.Node interface. -var _ node.Node = (*EthHeader)(nil) - -/* - INPUT -*/ - -// NewEthHeader converts a *types.Header into an EthHeader IPLD node -func NewEthHeader(header *types.Header) (*EthHeader, error) { - headerRLP, err := rlp.EncodeToBytes(header) - if err != nil { - return nil, err - } - c, err := RawdataToCid(MEthHeader, headerRLP, mh.KECCAK_256) - if err != nil { - return nil, err - } - return &EthHeader{ - Header: header, - cid: c, - rawdata: headerRLP, - }, nil -} - -/* - OUTPUT -*/ - -// DecodeEthHeader takes a cid and its raw binary data -// from IPFS and returns an EthTx object for further processing. -func DecodeEthHeader(c cid.Cid, b []byte) (*EthHeader, error) { - var h *types.Header - if err := rlp.DecodeBytes(b, h); err != nil { - return nil, err - } - return &EthHeader{ - Header: h, - cid: c, - rawdata: b, - }, nil -} - -/* - Block INTERFACE -*/ - -// RawData returns the binary of the RLP encode of the block header. -func (b *EthHeader) RawData() []byte { - return b.rawdata -} - -// Cid returns the cid of the block header. -func (b *EthHeader) Cid() cid.Cid { - return b.cid -} - -// String is a helper for output -func (b *EthHeader) String() string { - return fmt.Sprintf("", b.cid) -} - -// Loggable returns a map the type of IPLD Link. 
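// [editor's example, not part of the original patch] Building an eth-block IPLD node from
// a go-ethereum header and reading back its CID. One hedged observation: DecodeEthHeader
// above hands rlp.DecodeBytes a nil *types.Header, which the rlp package rejects at
// runtime; decoding into an addressable value, as sketched here, avoids that. The header
// and raw bytes are assumed to come from elsewhere.
func exampleHeaderNode(h *types.Header, c cid.Cid, raw []byte) error {
    headerNode, err := NewEthHeader(h)
    if err != nil {
        return err
    }
    fmt.Println("eth-block CID:", headerNode.Cid()) // codec 0x90, keccak-256 multihash

    var decoded types.Header // non-nil decode target
    if err := rlp.DecodeBytes(raw, &decoded); err != nil {
        return err
    }
    _ = &EthHeader{Header: &decoded, cid: c, rawdata: raw}
    return nil
}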
-func (b *EthHeader) Loggable() map[string]interface{} { - return map[string]interface{}{ - "type": "eth-block", - } -} - -/* - Node INTERFACE -*/ - -// Resolve resolves a path through this node, stopping at any link boundary -// and returning the object found as well as the remaining path to traverse -func (b *EthHeader) Resolve(p []string) (interface{}, []string, error) { - if len(p) == 0 { - return b, nil, nil - } - - first, rest := p[0], p[1:] - - switch first { - case "parent": - return &node.Link{Cid: commonHashToCid(MEthHeader, b.ParentHash)}, rest, nil - case "receipts": - return &node.Link{Cid: commonHashToCid(MEthTxReceiptTrie, b.ReceiptHash)}, rest, nil - case "root": - return &node.Link{Cid: commonHashToCid(MEthStateTrie, b.Root)}, rest, nil - case "tx": - return &node.Link{Cid: commonHashToCid(MEthTxTrie, b.TxHash)}, rest, nil - case "uncles": - return &node.Link{Cid: commonHashToCid(MEthHeaderList, b.UncleHash)}, rest, nil - } - - if len(p) != 1 { - return nil, nil, fmt.Errorf("unexpected path elements past %s", first) - } - - switch first { - case "bloom": - return b.Bloom, nil, nil - case "coinbase": - return b.Coinbase, nil, nil - case "difficulty": - return b.Difficulty, nil, nil - case "extra": - // This is a []byte. By default they are marshalled into Base64. - return fmt.Sprintf("0x%x", b.Extra), nil, nil - case "gaslimit": - return b.GasLimit, nil, nil - case "gasused": - return b.GasUsed, nil, nil - case "mixdigest": - return b.MixDigest, nil, nil - case "nonce": - return b.Nonce, nil, nil - case "number": - return b.Number, nil, nil - case "time": - return b.Time, nil, nil - default: - return nil, nil, fmt.Errorf("no such link") - } -} - -// Tree lists all paths within the object under 'path', and up to the given depth. -// To list the entire object (similar to `find .`) pass "" and -1 -func (b *EthHeader) Tree(p string, depth int) []string { - if p != "" || depth == 0 { - return nil - } - - return []string{ - "time", - "bloom", - "coinbase", - "difficulty", - "extra", - "gaslimit", - "gasused", - "mixdigest", - "nonce", - "number", - "parent", - "receipts", - "root", - "tx", - "uncles", - } -} - -// ResolveLink is a helper function that allows easier traversal of links through blocks -func (b *EthHeader) ResolveLink(p []string) (*node.Link, []string, error) { - obj, rest, err := b.Resolve(p) - if err != nil { - return nil, nil, err - } - - if lnk, ok := obj.(*node.Link); ok { - return lnk, rest, nil - } - - return nil, nil, fmt.Errorf("resolved item was not a link") -} - -// Copy will go away. It is here to comply with the Node interface. -func (b *EthHeader) Copy() node.Node { - panic("implement me") -} - -// Links is a helper function that returns all links within this object -// HINT: Use `ipfs refs ` -func (b *EthHeader) Links() []*node.Link { - return []*node.Link{ - {Cid: commonHashToCid(MEthHeader, b.ParentHash)}, - {Cid: commonHashToCid(MEthTxReceiptTrie, b.ReceiptHash)}, - {Cid: commonHashToCid(MEthStateTrie, b.Root)}, - {Cid: commonHashToCid(MEthTxTrie, b.TxHash)}, - {Cid: commonHashToCid(MEthHeaderList, b.UncleHash)}, - } -} - -// Stat will go away. It is here to comply with the Node interface. -func (b *EthHeader) Stat() (*node.NodeStat, error) { - return &node.NodeStat{}, nil -} - -// Size will go away. It is here to comply with the Node interface. 
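// [editor's example, not part of the original patch] Walking header links with ResolveLink:
// "parent", "tx", "receipts", "root" and "uncles" resolve to IPLD links whose CIDs are
// derived from the corresponding header hashes, as Resolve above shows. In-package sketch.
func exampleHeaderLinks(b *EthHeader) {
    if lnk, _, err := b.ResolveLink([]string{"parent"}); err == nil {
        fmt.Println("parent header CID:", lnk.Cid) // commonHashToCid(MEthHeader, b.ParentHash)
    }
    if lnk, _, err := b.ResolveLink([]string{"root"}); err == nil {
        fmt.Println("state root CID:", lnk.Cid) // eth-state-trie codec 0x96
    }
}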
-func (b *EthHeader) Size() (uint64, error) { - return 0, nil -} - -/* - EthHeader functions -*/ - -// MarshalJSON processes the block header into readable JSON format, -// converting the right links into their cids, and keeping the original -// hex hash, allowing the user to simplify external queries. -func (b *EthHeader) MarshalJSON() ([]byte, error) { - out := map[string]interface{}{ - "time": b.Time, - "bloom": b.Bloom, - "coinbase": b.Coinbase, - "difficulty": b.Difficulty, - "extra": fmt.Sprintf("0x%x", b.Extra), - "gaslimit": b.GasLimit, - "gasused": b.GasUsed, - "mixdigest": b.MixDigest, - "nonce": b.Nonce, - "number": b.Number, - "parent": commonHashToCid(MEthHeader, b.ParentHash), - "receipts": commonHashToCid(MEthTxReceiptTrie, b.ReceiptHash), - "root": commonHashToCid(MEthStateTrie, b.Root), - "tx": commonHashToCid(MEthTxTrie, b.TxHash), - "uncles": commonHashToCid(MEthHeaderList, b.UncleHash), - } - return json.Marshal(out) -} diff --git a/pkg/ipfs/ipld/eth_parser.go b/pkg/ipfs/ipld/eth_parser.go deleted file mode 100644 index f02d7d40..00000000 --- a/pkg/ipfs/ipld/eth_parser.go +++ /dev/null @@ -1,97 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package ipld - -import ( - "bytes" - "fmt" - - "github.com/ethereum/go-ethereum/core/types" -) - -// FromBlockAndReceipts takes a block and processes it -// to return it a set of IPLD nodes for further processing. 
-func FromBlockAndReceipts(block *types.Block, receipts []*types.Receipt) (*EthHeader, []*EthHeader, []*EthTx, []*EthTxTrie, []*EthReceipt, []*EthRctTrie, error) { - // Process the header - headerNode, err := NewEthHeader(block.Header()) - if err != nil { - return nil, nil, nil, nil, nil, nil, err - } - // Process the uncles - uncleNodes := make([]*EthHeader, len(block.Uncles())) - for i, uncle := range block.Uncles() { - uncleNode, err := NewEthHeader(uncle) - if err != nil { - return nil, nil, nil, nil, nil, nil, err - } - uncleNodes[i] = uncleNode - } - // Process the txs - ethTxNodes, ethTxTrieNodes, err := processTransactions(block.Transactions(), - block.Header().TxHash[:]) - if err != nil { - return nil, nil, nil, nil, nil, nil, err - } - // Process the receipts - ethRctNodes, ethRctTrieNodes, err := processReceipts(receipts, - block.Header().ReceiptHash[:]) - return headerNode, uncleNodes, ethTxNodes, ethTxTrieNodes, ethRctNodes, ethRctTrieNodes, err -} - -// processTransactions will take the found transactions in a parsed block body -// to return IPLD node slices for eth-tx and eth-tx-trie -func processTransactions(txs []*types.Transaction, expectedTxRoot []byte) ([]*EthTx, []*EthTxTrie, error) { - var ethTxNodes []*EthTx - transactionTrie := newTxTrie() - - for idx, tx := range txs { - ethTx, err := NewEthTx(tx) - if err != nil { - return nil, nil, err - } - ethTxNodes = append(ethTxNodes, ethTx) - transactionTrie.add(idx, ethTx.RawData()) - } - - if !bytes.Equal(transactionTrie.rootHash(), expectedTxRoot) { - return nil, nil, fmt.Errorf("wrong transaction hash computed") - } - - return ethTxNodes, transactionTrie.getNodes(), nil -} - -// processReceipts will take in receipts -// to return IPLD node slices for eth-rct and eth-rct-trie -func processReceipts(rcts []*types.Receipt, expectedRctRoot []byte) ([]*EthReceipt, []*EthRctTrie, error) { - var ethRctNodes []*EthReceipt - receiptTrie := newRctTrie() - - for idx, rct := range rcts { - ethRct, err := NewReceipt(rct) - if err != nil { - return nil, nil, err - } - ethRctNodes = append(ethRctNodes, ethRct) - receiptTrie.add(idx, ethRct.RawData()) - } - - if !bytes.Equal(receiptTrie.rootHash(), expectedRctRoot) { - return nil, nil, fmt.Errorf("wrong receipt hash computed") - } - - return ethRctNodes, receiptTrie.getNodes(), nil -} diff --git a/pkg/ipfs/ipld/eth_receipt.go b/pkg/ipfs/ipld/eth_receipt.go deleted file mode 100644 index cfa46b36..00000000 --- a/pkg/ipfs/ipld/eth_receipt.go +++ /dev/null @@ -1,199 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
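// [editor's example, not part of the original patch] A caller-side sketch of
// FromBlockAndReceipts above: one call turns a block plus its receipts into every IPLD
// node the watcher indexes. The block and receipts are assumed to be fetched from an
// eth client elsewhere.
func exampleFromBlock(block *types.Block, receipts []*types.Receipt) error {
    header, uncles, txs, txTrieNodes, rcts, rctTrieNodes, err := FromBlockAndReceipts(block, receipts)
    if err != nil {
        return err
    }
    fmt.Printf("header %s: %d uncles, %d txs, %d tx-trie nodes, %d rcts, %d rct-trie nodes\n",
        header.Cid(), len(uncles), len(txs), len(txTrieNodes), len(rcts), len(rctTrieNodes))
    return nil
}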
- -package ipld - -import ( - "encoding/json" - "fmt" - "strconv" - - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/rlp" - "github.com/ipfs/go-cid" - node "github.com/ipfs/go-ipld-format" - mh "github.com/multiformats/go-multihash" -) - -type EthReceipt struct { - *types.Receipt - - rawdata []byte - cid cid.Cid -} - -// Static (compile time) check that EthReceipt satisfies the node.Node interface. -var _ node.Node = (*EthReceipt)(nil) - -/* - INPUT -*/ - -// NewReceipt converts a types.ReceiptForStorage to an EthReceipt IPLD node -func NewReceipt(receipt *types.Receipt) (*EthReceipt, error) { - receiptRLP, err := rlp.EncodeToBytes(receipt) - if err != nil { - return nil, err - } - c, err := RawdataToCid(MEthTxReceipt, receiptRLP, mh.KECCAK_256) - if err != nil { - return nil, err - } - return &EthReceipt{ - Receipt: receipt, - cid: c, - rawdata: receiptRLP, - }, nil -} - -/* - OUTPUT -*/ - -// DecodeEthReceipt takes a cid and its raw binary data -// from IPFS and returns an EthTx object for further processing. -func DecodeEthReceipt(c cid.Cid, b []byte) (*EthReceipt, error) { - var r *types.Receipt - if err := rlp.DecodeBytes(b, r); err != nil { - return nil, err - } - return &EthReceipt{ - Receipt: r, - cid: c, - rawdata: b, - }, nil -} - -/* - Block INTERFACE -*/ - -func (node *EthReceipt) RawData() []byte { - return node.rawdata -} - -func (node *EthReceipt) Cid() cid.Cid { - return node.cid -} - -// String is a helper for output -func (r *EthReceipt) String() string { - return fmt.Sprintf("", r.cid) -} - -// Loggable returns in a map the type of IPLD Link. -func (r *EthReceipt) Loggable() map[string]interface{} { - return map[string]interface{}{ - "type": "eth-receipt", - } -} - -// Resolve resolves a path through this node, stopping at any link boundary -// and returning the object found as well as the remaining path to traverse -func (r *EthReceipt) Resolve(p []string) (interface{}, []string, error) { - if len(p) == 0 { - return r, nil, nil - } - - if len(p) > 1 { - return nil, nil, fmt.Errorf("unexpected path elements past %s", p[0]) - } - - switch p[0] { - - case "root": - return r.PostState, nil, nil - case "status": - return r.Status, nil, nil - case "cumulativeGasUsed": - return r.CumulativeGasUsed, nil, nil - case "logsBloom": - return r.Bloom, nil, nil - case "logs": - return r.Logs, nil, nil - case "transactionHash": - return r.TxHash, nil, nil - case "contractAddress": - return r.ContractAddress, nil, nil - case "gasUsed": - return r.GasUsed, nil, nil - default: - return nil, nil, fmt.Errorf("no such link") - } -} - -// Tree lists all paths within the object under 'path', and up to the given depth. -// To list the entire object (similar to `find .`) pass "" and -1 -func (r *EthReceipt) Tree(p string, depth int) []string { - if p != "" || depth == 0 { - return nil - } - return []string{"root", "status", "cumulativeGasUsed", "logsBloom", "logs", "transactionHash", "contractAddress", "gasUsed"} -} - -// ResolveLink is a helper function that calls resolve and asserts the -// output is a link -func (r *EthReceipt) ResolveLink(p []string) (*node.Link, []string, error) { - obj, rest, err := r.Resolve(p) - if err != nil { - return nil, nil, err - } - - if lnk, ok := obj.(*node.Link); ok { - return lnk, rest, nil - } - - return nil, nil, fmt.Errorf("resolved item was not a link") -} - -// Copy will go away. It is here to comply with the Node interface. 
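// [editor's example, not part of the original patch] Building an eth-receipt node and
// reading scalar fields back through Resolve, as defined above. In-package sketch; the
// receipt is assumed to come from an eth client. Note, as an observation only, that
// DecodeEthReceipt above uses the same nil-pointer decode target as DecodeEthHeader.
func exampleReceiptNode(rct *types.Receipt) error {
    rctNode, err := NewReceipt(rct)
    if err != nil {
        return err
    }
    status, _, _ := rctNode.Resolve([]string{"status"})
    gasUsed, _, _ := rctNode.Resolve([]string{"gasUsed"})
    fmt.Println(rctNode.Cid(), status, gasUsed) // eth-tx-receipt codec 0x95
    return nil
}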
-func (*EthReceipt) Copy() node.Node { - panic("implement me") -} - -// Links is a helper function that returns all links within this object -func (*EthReceipt) Links() []*node.Link { - return nil -} - -// Stat will go away. It is here to comply with the interface. -func (r *EthReceipt) Stat() (*node.NodeStat, error) { - return &node.NodeStat{}, nil -} - -// Size will go away. It is here to comply with the interface. -func (r *EthReceipt) Size() (uint64, error) { - return strconv.ParseUint(r.Receipt.Size().String(), 10, 64) -} - -/* - EthReceipt functions -*/ - -// MarshalJSON processes the receipt into readable JSON format. -func (r *EthReceipt) MarshalJSON() ([]byte, error) { - out := map[string]interface{}{ - "root": r.PostState, - "status": r.Status, - "cumulativeGasUsed": r.CumulativeGasUsed, - "logsBloom": r.Bloom, - "logs": r.Logs, - "transactionHash": r.TxHash, - "contractAddress": r.ContractAddress, - "gasUsed": r.GasUsed, - } - return json.Marshal(out) -} diff --git a/pkg/ipfs/ipld/eth_receipt_trie.go b/pkg/ipfs/ipld/eth_receipt_trie.go deleted file mode 100644 index 6a1b7e40..00000000 --- a/pkg/ipfs/ipld/eth_receipt_trie.go +++ /dev/null @@ -1,152 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package ipld - -import ( - "fmt" - - "github.com/ipfs/go-cid" - node "github.com/ipfs/go-ipld-format" - "github.com/multiformats/go-multihash" - - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/rlp" -) - -// EthRctTrie (eth-tx-trie codec 0x92) represents -// a node from the transaction trie in ethereum. -type EthRctTrie struct { - *TrieNode -} - -// Static (compile time) check that EthRctTrie satisfies the node.Node interface. -var _ node.Node = (*EthRctTrie)(nil) - -/* - INPUT -*/ - -// To create a proper trie of the eth-tx-trie objects, it is required -// to input all transactions belonging to a forest in a single step. -// We are adding the transactions, and creating its trie on -// block body parsing time. - -/* - OUTPUT -*/ - -// DecodeEthRctTrie returns an EthRctTrie object from its cid and rawdata. -func DecodeEthRctTrie(c cid.Cid, b []byte) (*EthRctTrie, error) { - tn, err := decodeTrieNode(c, b, decodeEthRctTrieLeaf) - if err != nil { - return nil, err - } - return &EthRctTrie{TrieNode: tn}, nil -} - -// decodeEthRctTrieLeaf parses a eth-rct-trie leaf -//from decoded RLP elements -func decodeEthRctTrieLeaf(i []interface{}) ([]interface{}, error) { - var r types.Receipt - err := rlp.DecodeBytes(i[1].([]byte), &r) - if err != nil { - return nil, err - } - c, err := RawdataToCid(MEthTxReceipt, i[1].([]byte), multihash.KECCAK_256) - if err != nil { - return nil, err - } - return []interface{}{ - i[0].([]byte), - &EthReceipt{ - Receipt: &r, - cid: c, - rawdata: i[1].([]byte), - }, - }, nil -} - -/* - Block INTERFACE -*/ - -// RawData returns the binary of the RLP encode of the transaction. 
-func (t *EthRctTrie) RawData() []byte { - return t.rawdata -} - -// Cid returns the cid of the transaction. -func (t *EthRctTrie) Cid() cid.Cid { - return t.cid -} - -// String is a helper for output -func (t *EthRctTrie) String() string { - return fmt.Sprintf("", t.cid) -} - -// Loggable returns in a map the type of IPLD Link. -func (t *EthRctTrie) Loggable() map[string]interface{} { - return map[string]interface{}{ - "type": "eth-rct-trie", - } -} - -/* - EthRctTrie functions -*/ - -// rctTrie wraps a localTrie for use on the receipt trie. -type rctTrie struct { - *localTrie -} - -// newRctTrie initializes and returns a rctTrie. -func newRctTrie() *rctTrie { - return &rctTrie{ - localTrie: newLocalTrie(), - } -} - -// getNodes invokes the localTrie, which computes the root hash of the -// transaction trie and returns its database keys, to return a slice -// of EthRctTrie nodes. -func (rt *rctTrie) getNodes() []*EthRctTrie { - keys := rt.getKeys() - var out []*EthRctTrie - it := rt.trie.NodeIterator([]byte{}) - for it.Next(true) { - - } - for _, k := range keys { - rawdata, err := rt.db.Get(k) - if err != nil { - panic(err) - } - c, err := RawdataToCid(MEthTxReceiptTrie, rawdata, multihash.KECCAK_256) - if err != nil { - return nil - } - tn := &TrieNode{ - cid: c, - rawdata: rawdata, - } - out = append(out, &EthRctTrie{TrieNode: tn}) - } - - return out -} diff --git a/pkg/ipfs/ipld/eth_state.go b/pkg/ipfs/ipld/eth_state.go deleted file mode 100644 index a127f956..00000000 --- a/pkg/ipfs/ipld/eth_state.go +++ /dev/null @@ -1,114 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package ipld - -import ( - "fmt" - - "github.com/ipfs/go-cid" - node "github.com/ipfs/go-ipld-format" - "github.com/multiformats/go-multihash" - - "github.com/ethereum/go-ethereum/rlp" -) - -// EthStateTrie (eth-state-trie, codec 0x96), represents -// a node from the satte trie in ethereum. -type EthStateTrie struct { - *TrieNode -} - -// Static (compile time) check that EthStateTrie satisfies the node.Node interface. -var _ node.Node = (*EthStateTrie)(nil) - -/* - INPUT -*/ - -// FromStateTrieRLP takes the RLP representation of an ethereum -// state trie node to return it as an IPLD node for further processing. -func FromStateTrieRLP(raw []byte) (*EthStateTrie, error) { - c, err := RawdataToCid(MEthStateTrie, raw, multihash.KECCAK_256) - if err != nil { - return nil, err - } - // Let's run the whole mile and process the nodeKind and - // its elements, in case somebody would need this function - // to parse an RLP element from the filesystem - return DecodeEthStateTrie(c, raw) -} - -/* - OUTPUT -*/ - -// DecodeEthStateTrie returns an EthStateTrie object from its cid and rawdata. 
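// [editor's example, not part of the original patch] Turning a raw state-trie node, as
// stored by go-ethereum, into an eth-state-trie IPLD node via FromStateTrieRLP above.
// The raw bytes are assumed to come from a geth database or a devp2p response.
func exampleStateTrieNode(raw []byte) error {
    stateNode, err := FromStateTrieRLP(raw)
    if err != nil {
        return err
    }
    fmt.Println("eth-state-trie CID:", stateNode.Cid()) // codec 0x96, keccak-256 of the node RLP
    return nil
}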
-func DecodeEthStateTrie(c cid.Cid, b []byte) (*EthStateTrie, error) { - tn, err := decodeTrieNode(c, b, decodeEthStateTrieLeaf) - if err != nil { - return nil, err - } - return &EthStateTrie{TrieNode: tn}, nil -} - -// decodeEthStateTrieLeaf parses a eth-tx-trie leaf -// from decoded RLP elements -func decodeEthStateTrieLeaf(i []interface{}) ([]interface{}, error) { - var account EthAccount - err := rlp.DecodeBytes(i[1].([]byte), &account) - if err != nil { - return nil, err - } - c, err := RawdataToCid(MEthAccountSnapshot, i[1].([]byte), multihash.KECCAK_256) - if err != nil { - return nil, err - } - return []interface{}{ - i[0].([]byte), - &EthAccountSnapshot{ - EthAccount: &account, - cid: c, - rawdata: i[1].([]byte), - }, - }, nil -} - -/* - Block INTERFACE -*/ - -// RawData returns the binary of the RLP encode of the state trie node. -func (st *EthStateTrie) RawData() []byte { - return st.rawdata -} - -// Cid returns the cid of the state trie node. -func (st *EthStateTrie) Cid() cid.Cid { - return st.cid -} - -// String is a helper for output -func (st *EthStateTrie) String() string { - return fmt.Sprintf("", st.cid) -} - -// Loggable returns in a map the type of IPLD Link. -func (st *EthStateTrie) Loggable() map[string]interface{} { - return map[string]interface{}{ - "type": "eth-state-trie", - } -} diff --git a/pkg/ipfs/ipld/eth_storage.go b/pkg/ipfs/ipld/eth_storage.go deleted file mode 100644 index 779cad4d..00000000 --- a/pkg/ipfs/ipld/eth_storage.go +++ /dev/null @@ -1,100 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package ipld - -import ( - "fmt" - - "github.com/ipfs/go-cid" - node "github.com/ipfs/go-ipld-format" - "github.com/multiformats/go-multihash" -) - -// EthStorageTrie (eth-storage-trie, codec 0x98), represents -// a node from the storage trie in ethereum. -type EthStorageTrie struct { - *TrieNode -} - -// Static (compile time) check that EthStorageTrie satisfies the node.Node interface. -var _ node.Node = (*EthStorageTrie)(nil) - -/* - INPUT -*/ - -// FromStorageTrieRLP takes the RLP representation of an ethereum -// storage trie node to return it as an IPLD node for further processing. -func FromStorageTrieRLP(raw []byte) (*EthStorageTrie, error) { - c, err := RawdataToCid(MEthStorageTrie, raw, multihash.KECCAK_256) - if err != nil { - return nil, err - } - - // Let's run the whole mile and process the nodeKind and - // its elements, in case somebody would need this function - // to parse an RLP element from the filesystem - return DecodeEthStorageTrie(c, raw) -} - -/* - OUTPUT -*/ - -// DecodeEthStorageTrie returns an EthStorageTrie object from its cid and rawdata. 
-func DecodeEthStorageTrie(c cid.Cid, b []byte) (*EthStorageTrie, error) { - tn, err := decodeTrieNode(c, b, decodeEthStorageTrieLeaf) - if err != nil { - return nil, err - } - return &EthStorageTrie{TrieNode: tn}, nil -} - -// decodeEthStorageTrieLeaf parses a eth-tx-trie leaf -// from decoded RLP elements -func decodeEthStorageTrieLeaf(i []interface{}) ([]interface{}, error) { - return []interface{}{ - i[0].([]byte), - i[1].([]byte), - }, nil -} - -/* - Block INTERFACE -*/ - -// RawData returns the binary of the RLP encode of the storage trie node. -func (st *EthStorageTrie) RawData() []byte { - return st.rawdata -} - -// Cid returns the cid of the storage trie node. -func (st *EthStorageTrie) Cid() cid.Cid { - return st.cid -} - -// String is a helper for output -func (st *EthStorageTrie) String() string { - return fmt.Sprintf("", st.cid) -} - -// Loggable returns in a map the type of IPLD Link. -func (st *EthStorageTrie) Loggable() map[string]interface{} { - return map[string]interface{}{ - "type": "eth-storage-trie", - } -} diff --git a/pkg/ipfs/ipld/eth_tx.go b/pkg/ipfs/ipld/eth_tx.go deleted file mode 100644 index 4fc4d20a..00000000 --- a/pkg/ipfs/ipld/eth_tx.go +++ /dev/null @@ -1,215 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package ipld - -import ( - "encoding/json" - "fmt" - "strconv" - - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/rlp" - "github.com/ipfs/go-cid" - node "github.com/ipfs/go-ipld-format" - mh "github.com/multiformats/go-multihash" -) - -// EthTx (eth-tx codec 0x93) represents an ethereum transaction -type EthTx struct { - *types.Transaction - - cid cid.Cid - rawdata []byte -} - -// Static (compile time) check that EthTx satisfies the node.Node interface. -var _ node.Node = (*EthTx)(nil) - -/* - INPUT -*/ - -// NewEthTx converts a *types.Transaction to an EthTx IPLD node -func NewEthTx(tx *types.Transaction) (*EthTx, error) { - txRLP, err := rlp.EncodeToBytes(tx) - if err != nil { - return nil, err - } - c, err := RawdataToCid(MEthTx, txRLP, mh.KECCAK_256) - if err != nil { - return nil, err - } - return &EthTx{ - Transaction: tx, - cid: c, - rawdata: txRLP, - }, nil -} - -/* - OUTPUT -*/ - -// DecodeEthTx takes a cid and its raw binary data -// from IPFS and returns an EthTx object for further processing. -func DecodeEthTx(c cid.Cid, b []byte) (*EthTx, error) { - var t *types.Transaction - if err := rlp.DecodeBytes(b, t); err != nil { - return nil, err - } - return &EthTx{ - Transaction: t, - cid: c, - rawdata: b, - }, nil -} - -/* - Block INTERFACE -*/ - -// RawData returns the binary of the RLP encode of the transaction. -func (t *EthTx) RawData() []byte { - return t.rawdata -} - -// Cid returns the cid of the transaction. 
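// [editor's example, not part of the original patch] Converting a go-ethereum transaction
// into an eth-tx IPLD node with NewEthTx above; the CID wraps the keccak-256 of the tx
// RLP under codec 0x93, i.e. it carries the canonical transaction hash. In-package sketch.
func exampleTxNode(tx *types.Transaction) error {
    txNode, err := NewEthTx(tx)
    if err != nil {
        return err
    }
    fmt.Printf("eth-tx CID %s over %d RLP bytes\n", txNode.Cid(), len(txNode.RawData()))
    return nil
}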
-func (t *EthTx) Cid() cid.Cid { - return t.cid -} - -// String is a helper for output -func (t *EthTx) String() string { - return fmt.Sprintf("", t.cid) -} - -// Loggable returns in a map the type of IPLD Link. -func (t *EthTx) Loggable() map[string]interface{} { - return map[string]interface{}{ - "type": "eth-tx", - } -} - -/* - Node INTERFACE -*/ - -// Resolve resolves a path through this node, stopping at any link boundary -// and returning the object found as well as the remaining path to traverse -func (t *EthTx) Resolve(p []string) (interface{}, []string, error) { - if len(p) == 0 { - return t, nil, nil - } - - if len(p) > 1 { - return nil, nil, fmt.Errorf("unexpected path elements past %s", p[0]) - } - - switch p[0] { - - case "gas": - return t.Gas(), nil, nil - case "gasPrice": - return t.GasPrice(), nil, nil - case "input": - return fmt.Sprintf("%x", t.Data()), nil, nil - case "nonce": - return t.Nonce(), nil, nil - case "r": - _, r, _ := t.RawSignatureValues() - return hexutil.EncodeBig(r), nil, nil - case "s": - _, _, s := t.RawSignatureValues() - return hexutil.EncodeBig(s), nil, nil - case "toAddress": - return t.To(), nil, nil - case "v": - v, _, _ := t.RawSignatureValues() - return hexutil.EncodeBig(v), nil, nil - case "value": - return hexutil.EncodeBig(t.Value()), nil, nil - default: - return nil, nil, fmt.Errorf("no such link") - } -} - -// Tree lists all paths within the object under 'path', and up to the given depth. -// To list the entire object (similar to `find .`) pass "" and -1 -func (t *EthTx) Tree(p string, depth int) []string { - if p != "" || depth == 0 { - return nil - } - return []string{"gas", "gasPrice", "input", "nonce", "r", "s", "toAddress", "v", "value"} -} - -// ResolveLink is a helper function that calls resolve and asserts the -// output is a link -func (t *EthTx) ResolveLink(p []string) (*node.Link, []string, error) { - obj, rest, err := t.Resolve(p) - if err != nil { - return nil, nil, err - } - - if lnk, ok := obj.(*node.Link); ok { - return lnk, rest, nil - } - - return nil, nil, fmt.Errorf("resolved item was not a link") -} - -// Copy will go away. It is here to comply with the interface. -func (t *EthTx) Copy() node.Node { - panic("implement me") -} - -// Links is a helper function that returns all links within this object -func (t *EthTx) Links() []*node.Link { - return nil -} - -// Stat will go away. It is here to comply with the interface. -func (t *EthTx) Stat() (*node.NodeStat, error) { - return &node.NodeStat{}, nil -} - -// Size will go away. It is here to comply with the interface. -func (t *EthTx) Size() (uint64, error) { - return strconv.ParseUint(t.Transaction.Size().String(), 10, 64) -} - -/* - EthTx functions -*/ - -// MarshalJSON processes the transaction into readable JSON format. 
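// [editor's example, not part of the original patch] Reading transaction fields through the
// Node interface defined above: signature and value fields come back hex-encoded, while
// "toAddress" comes back as the address value from the tx itself. In-package sketch.
func exampleTxFields(t *EthTx) {
    value, _, _ := t.Resolve([]string{"value"}) // hexutil-encoded big.Int string
    to, _, _ := t.Resolve([]string{"toAddress"})
    fmt.Println("value:", value, "to:", to)
}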
-func (t *EthTx) MarshalJSON() ([]byte, error) { - v, r, s := t.RawSignatureValues() - - out := map[string]interface{}{ - "gas": t.Gas(), - "gasPrice": hexutil.EncodeBig(t.GasPrice()), - "input": fmt.Sprintf("%x", t.Data()), - "nonce": t.Nonce(), - "r": hexutil.EncodeBig(r), - "s": hexutil.EncodeBig(s), - "toAddress": t.To(), - "v": hexutil.EncodeBig(v), - "value": hexutil.EncodeBig(t.Value()), - } - return json.Marshal(out) -} diff --git a/pkg/ipfs/ipld/eth_tx_trie.go b/pkg/ipfs/ipld/eth_tx_trie.go deleted file mode 100644 index 6f106f6d..00000000 --- a/pkg/ipfs/ipld/eth_tx_trie.go +++ /dev/null @@ -1,152 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package ipld - -import ( - "fmt" - - "github.com/ipfs/go-cid" - node "github.com/ipfs/go-ipld-format" - "github.com/multiformats/go-multihash" - - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/rlp" -) - -// EthTxTrie (eth-tx-trie codec 0x92) represents -// a node from the transaction trie in ethereum. -type EthTxTrie struct { - *TrieNode -} - -// Static (compile time) check that EthTxTrie satisfies the node.Node interface. -var _ node.Node = (*EthTxTrie)(nil) - -/* - INPUT -*/ - -// To create a proper trie of the eth-tx-trie objects, it is required -// to input all transactions belonging to a forest in a single step. -// We are adding the transactions, and creating its trie on -// block body parsing time. - -/* - OUTPUT -*/ - -// DecodeEthTxTrie returns an EthTxTrie object from its cid and rawdata. -func DecodeEthTxTrie(c cid.Cid, b []byte) (*EthTxTrie, error) { - tn, err := decodeTrieNode(c, b, decodeEthTxTrieLeaf) - if err != nil { - return nil, err - } - return &EthTxTrie{TrieNode: tn}, nil -} - -// decodeEthTxTrieLeaf parses a eth-tx-trie leaf -//from decoded RLP elements -func decodeEthTxTrieLeaf(i []interface{}) ([]interface{}, error) { - var t types.Transaction - err := rlp.DecodeBytes(i[1].([]byte), &t) - if err != nil { - return nil, err - } - c, err := RawdataToCid(MEthTx, i[1].([]byte), multihash.KECCAK_256) - if err != nil { - return nil, err - } - return []interface{}{ - i[0].([]byte), - &EthTx{ - Transaction: &t, - cid: c, - rawdata: i[1].([]byte), - }, - }, nil -} - -/* - Block INTERFACE -*/ - -// RawData returns the binary of the RLP encode of the transaction. -func (t *EthTxTrie) RawData() []byte { - return t.rawdata -} - -// Cid returns the cid of the transaction. -func (t *EthTxTrie) Cid() cid.Cid { - return t.cid -} - -// String is a helper for output -func (t *EthTxTrie) String() string { - return fmt.Sprintf("", t.cid) -} - -// Loggable returns in a map the type of IPLD Link. -func (t *EthTxTrie) Loggable() map[string]interface{} { - return map[string]interface{}{ - "type": "eth-tx-trie", - } -} - -/* - EthTxTrie functions -*/ - -// txTrie wraps a localTrie for use on the transaction trie. 
-type txTrie struct { - *localTrie -} - -// newTxTrie initializes and returns a txTrie. -func newTxTrie() *txTrie { - return &txTrie{ - localTrie: newLocalTrie(), - } -} - -// getNodes invokes the localTrie, which computes the root hash of the -// transaction trie and returns its database keys, to return a slice -// of EthTxTrie nodes. -func (tt *txTrie) getNodes() []*EthTxTrie { - keys := tt.getKeys() - var out []*EthTxTrie - it := tt.trie.NodeIterator([]byte{}) - for it.Next(true) { - - } - for _, k := range keys { - rawdata, err := tt.db.Get(k) - if err != nil { - panic(err) - } - c, err := RawdataToCid(MEthTxTrie, rawdata, multihash.KECCAK_256) - if err != nil { - return nil - } - tn := &TrieNode{ - cid: c, - rawdata: rawdata, - } - out = append(out, &EthTxTrie{TrieNode: tn}) - } - - return out -} diff --git a/pkg/ipfs/ipld/shared.go b/pkg/ipfs/ipld/shared.go deleted file mode 100644 index e8358f7b..00000000 --- a/pkg/ipfs/ipld/shared.go +++ /dev/null @@ -1,151 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package ipld - -import ( - "bytes" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/rawdb" - "github.com/ethereum/go-ethereum/ethdb" - "github.com/ethereum/go-ethereum/rlp" - "github.com/ethereum/go-ethereum/trie" - "github.com/ipfs/go-cid" - mh "github.com/multiformats/go-multihash" -) - -// IPLD Codecs for Ethereum -// See the authoritative document: -// https://github.com/multiformats/multicodec/blob/master/table.csv -const ( - RawBinary = 0x55 - MEthHeader = 0x90 - MEthHeaderList = 0x91 - MEthTxTrie = 0x92 - MEthTx = 0x93 - MEthTxReceiptTrie = 0x94 - MEthTxReceipt = 0x95 - MEthStateTrie = 0x96 - MEthAccountSnapshot = 0x97 - MEthStorageTrie = 0x98 - MBitcoinHeader = 0xb0 - MBitcoinTx = 0xb1 -) - -// RawdataToCid takes the desired codec and a slice of bytes -// and returns the proper cid of the object. -func RawdataToCid(codec uint64, rawdata []byte, multiHash uint64) (cid.Cid, error) { - c, err := cid.Prefix{ - Codec: codec, - Version: 1, - MhType: multiHash, - MhLength: -1, - }.Sum(rawdata) - if err != nil { - return cid.Cid{}, err - } - return c, nil -} - -// keccak256ToCid takes a keccak256 hash and returns its cid based on -// the codec given. 
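// [editor's example, not part of the original patch] RawdataToCid above builds a CIDv1
// whose multihash digest is simply the hash of the payload, so for an eth-block the
// digest is the familiar keccak-256 block hash. A small in-package sanity-check sketch;
// the header RLP is assumed to be available.
func exampleCidRoundTrip(headerRLP []byte) (uint64, []byte, error) {
    c, err := RawdataToCid(MEthHeader, headerRLP, mh.KECCAK_256)
    if err != nil {
        return 0, nil, err
    }
    decoded, err := mh.Decode(c.Hash())
    if err != nil {
        return 0, nil, err
    }
    // decoded.Digest equals the keccak-256 hash of headerRLP; c.Type() is MEthHeader (0x90)
    return c.Type(), decoded.Digest, nil
}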
-func keccak256ToCid(codec uint64, h []byte) cid.Cid { - buf, err := mh.Encode(h, mh.KECCAK_256) - if err != nil { - panic(err) - } - - return cid.NewCidV1(codec, mh.Multihash(buf)) -} - -// commonHashToCid takes a go-ethereum common.Hash and returns its -// cid based on the codec given, -func commonHashToCid(codec uint64, h common.Hash) cid.Cid { - mhash, err := mh.Encode(h[:], mh.KECCAK_256) - if err != nil { - panic(err) - } - - return cid.NewCidV1(codec, mhash) -} - -// sha256ToCid takes a sha246 hash and returns its cid based on the -// codec given -func sha256ToCid(codec uint64, h []byte) cid.Cid { - hash, err := mh.Encode(h, mh.DBL_SHA2_256) - if err != nil { - panic(err) - } - - return cid.NewCidV1(codec, hash) -} - -// getRLP encodes the given object to RLP returning its bytes. -func getRLP(object interface{}) []byte { - buf := new(bytes.Buffer) - if err := rlp.Encode(buf, object); err != nil { - panic(err) - } - - return buf.Bytes() -} - -// localTrie wraps a go-ethereum trie and its underlying memory db. -// It contributes to the creation of the trie node objects. -type localTrie struct { - keys [][]byte - db ethdb.Database - trie *trie.Trie -} - -// newLocalTrie initializes and returns a localTrie object -func newLocalTrie() *localTrie { - var err error - lt := &localTrie{} - lt.db = rawdb.NewMemoryDatabase() - lt.trie, err = trie.New(common.Hash{}, trie.NewDatabase(lt.db)) - if err != nil { - panic(err) - } - return lt -} - -// add receives the index of an object and its rawdata value -// and includes it into the localTrie -func (lt *localTrie) add(idx int, rawdata []byte) { - key, err := rlp.EncodeToBytes(uint(idx)) - if err != nil { - panic(err) - } - lt.keys = append(lt.keys, key) - if err := lt.db.Put(key, rawdata); err != nil { - panic(err) - } - lt.trie.Update(key, rawdata) -} - -// rootHash returns the computed trie root. -// Useful for sanity checks on parsed data. -func (lt *localTrie) rootHash() []byte { - return lt.trie.Hash().Bytes() -} - -// getKeys returns the stored keys of the memory database -// of the localTrie for further processing. -func (lt *localTrie) getKeys() [][]byte { - return lt.keys -} diff --git a/pkg/ipfs/ipld/trie_node.go b/pkg/ipfs/ipld/trie_node.go deleted file mode 100644 index 788f76db..00000000 --- a/pkg/ipfs/ipld/trie_node.go +++ /dev/null @@ -1,444 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package ipld - -import ( - "encoding/json" - "fmt" - - "github.com/ethereum/go-ethereum/rlp" - "github.com/ipfs/go-cid" - node "github.com/ipfs/go-ipld-format" -) - -// TrieNode is the general abstraction for -//ethereum IPLD trie nodes. -type TrieNode struct { - // leaf, extension or branch - nodeKind string - - // If leaf or extension: [0] is key, [1] is val. - // If branch: [0] - [16] are children. 
- elements []interface{} - - // IPLD block information - cid cid.Cid - rawdata []byte -} - -/* - OUTPUT -*/ - -type trieNodeLeafDecoder func([]interface{}) ([]interface{}, error) - -// decodeTrieNode returns a TrieNode object from an IPLD block's -// cid and rawdata. -func decodeTrieNode(c cid.Cid, b []byte, - leafDecoder trieNodeLeafDecoder) (*TrieNode, error) { - var ( - i, decoded, elements []interface{} - nodeKind string - err error - ) - - if err = rlp.DecodeBytes(b, &i); err != nil { - return nil, err - } - - codec := c.Type() - switch len(i) { - case 2: - nodeKind, decoded, err = decodeCompactKey(i) - if err != nil { - return nil, err - } - - if nodeKind == "extension" { - elements, err = parseTrieNodeExtension(decoded, codec) - } - if nodeKind == "leaf" { - elements, err = leafDecoder(decoded) - } - if nodeKind != "extension" && nodeKind != "leaf" { - return nil, fmt.Errorf("unexpected nodeKind returned from decoder") - } - case 17: - nodeKind = "branch" - elements, err = parseTrieNodeBranch(i, codec) - if err != nil { - return nil, err - } - default: - return nil, fmt.Errorf("unknown trie node type") - } - - return &TrieNode{ - nodeKind: nodeKind, - elements: elements, - rawdata: b, - cid: c, - }, nil -} - -// decodeCompactKey takes a compact key, and returns its nodeKind and value. -func decodeCompactKey(i []interface{}) (string, []interface{}, error) { - first := i[0].([]byte) - last := i[1].([]byte) - - switch first[0] / 16 { - case '\x00': - return "extension", []interface{}{ - nibbleToByte(first)[2:], - last, - }, nil - case '\x01': - return "extension", []interface{}{ - nibbleToByte(first)[1:], - last, - }, nil - case '\x02': - return "leaf", []interface{}{ - nibbleToByte(first)[2:], - last, - }, nil - case '\x03': - return "leaf", []interface{}{ - nibbleToByte(first)[1:], - last, - }, nil - default: - return "", nil, fmt.Errorf("unknown hex prefix") - } -} - -// parseTrieNodeExtension helper improves readability -func parseTrieNodeExtension(i []interface{}, codec uint64) ([]interface{}, error) { - return []interface{}{ - i[0].([]byte), - keccak256ToCid(codec, i[1].([]byte)), - }, nil -} - -// parseTrieNodeBranch helper improves readability -func parseTrieNodeBranch(i []interface{}, codec uint64) ([]interface{}, error) { - var out []interface{} - - for i, vi := range i { - v, ok := vi.([]byte) - // Sometimes this throws "panic: interface conversion: interface {} is []interface {}, not []uint8" - // Figure out why, and if it is okay to continue - if !ok { - return nil, fmt.Errorf("unable to decode branch node entry into []byte at position: %d value: %+v", i, vi) - } - - switch len(v) { - case 0: - out = append(out, nil) - case 32: - out = append(out, keccak256ToCid(codec, v)) - default: - return nil, fmt.Errorf("unrecognized object: %v", v) - } - } - - return out, nil -} - -/* - Node INTERFACE -*/ - -// Resolve resolves a path through this node, stopping at any link boundary -// and returning the object found as well as the remaining path to traverse -func (t *TrieNode) Resolve(p []string) (interface{}, []string, error) { - switch t.nodeKind { - case "extension": - return t.resolveTrieNodeExtension(p) - case "leaf": - return t.resolveTrieNodeLeaf(p) - case "branch": - return t.resolveTrieNodeBranch(p) - default: - return nil, nil, fmt.Errorf("nodeKind case not implemented") - } -} - -// Tree lists all paths within the object under 'path', and up to the given depth. 
-// To list the entire object (similar to `find .`) pass "" and -1 -func (t *TrieNode) Tree(p string, depth int) []string { - if p != "" || depth == 0 { - return nil - } - - var out []string - - switch t.nodeKind { - case "extension": - var val string - for _, e := range t.elements[0].([]byte) { - val += fmt.Sprintf("%x", e) - } - return []string{val} - case "branch": - for i, elem := range t.elements { - if _, ok := elem.(*cid.Cid); ok { - out = append(out, fmt.Sprintf("%x", i)) - } - } - return out - - default: - return nil - } -} - -// ResolveLink is a helper function that calls resolve and asserts the -// output is a link -func (t *TrieNode) ResolveLink(p []string) (*node.Link, []string, error) { - obj, rest, err := t.Resolve(p) - if err != nil { - return nil, nil, err - } - - lnk, ok := obj.(*node.Link) - if !ok { - return nil, nil, fmt.Errorf("was not a link") - } - - return lnk, rest, nil -} - -// Copy will go away. It is here to comply with the interface. -func (t *TrieNode) Copy() node.Node { - panic("dont use this yet") -} - -// Links is a helper function that returns all links within this object -func (t *TrieNode) Links() []*node.Link { - var out []*node.Link - - for _, i := range t.elements { - c, ok := i.(cid.Cid) - if ok { - out = append(out, &node.Link{Cid: c}) - } - } - - return out -} - -// Stat will go away. It is here to comply with the interface. -func (t *TrieNode) Stat() (*node.NodeStat, error) { - return &node.NodeStat{}, nil -} - -// Size will go away. It is here to comply with the interface. -func (t *TrieNode) Size() (uint64, error) { - return 0, nil -} - -/* - TrieNode functions -*/ - -// MarshalJSON processes the transaction trie into readable JSON format. -func (t *TrieNode) MarshalJSON() ([]byte, error) { - var out map[string]interface{} - - switch t.nodeKind { - case "extension": - fallthrough - case "leaf": - var hexPrefix string - for _, e := range t.elements[0].([]byte) { - hexPrefix += fmt.Sprintf("%x", e) - } - - // if we got a byte we need to do this casting otherwise - // it will be marshaled to a base64 encoded value - if _, ok := t.elements[1].([]byte); ok { - var hexVal string - for _, e := range t.elements[1].([]byte) { - hexVal += fmt.Sprintf("%x", e) - } - - t.elements[1] = hexVal - } - - out = map[string]interface{}{ - "type": t.nodeKind, - hexPrefix: t.elements[1], - } - - case "branch": - out = map[string]interface{}{ - "type": "branch", - "0": t.elements[0], - "1": t.elements[1], - "2": t.elements[2], - "3": t.elements[3], - "4": t.elements[4], - "5": t.elements[5], - "6": t.elements[6], - "7": t.elements[7], - "8": t.elements[8], - "9": t.elements[9], - "a": t.elements[10], - "b": t.elements[11], - "c": t.elements[12], - "d": t.elements[13], - "e": t.elements[14], - "f": t.elements[15], - } - default: - return nil, fmt.Errorf("nodeKind %s not supported", t.nodeKind) - } - - return json.Marshal(out) -} - -// nibbleToByte expands the nibbles of a byte slice into their own bytes. 
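// [editor's example, not part of the original patch] A worked illustration of the nibble
// helpers used by the resolvers in this file (nibbleToByte just below, shiftFromPath and
// getHexIndex further down): nibbleToByte splits each byte into its two hex nibbles, so
// 0xab becomes [0x0a 0x0b], and shiftFromPath consumes that many path characters, e.g.
// ["ab", "c", "d", "1"] with 2 yields "ab" and ["c", "d", "1"].
func exampleNibbleHelpers() {
    nibbles := nibbleToByte([]byte{0xab, 0xcd})                    // [0x0a 0x0b 0x0c 0x0d]
    taken, rest := shiftFromPath([]string{"ab", "c", "d", "1"}, 2) // "ab", ["c" "d" "1"]
    fmt.Println(nibbles, taken, rest, getHexIndex("f"))            // ... 15
}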
-func nibbleToByte(k []byte) []byte { - var out []byte - - for _, b := range k { - out = append(out, b/16) - out = append(out, b%16) - } - - return out -} - -// Resolve reading conveniences -func (t *TrieNode) resolveTrieNodeExtension(p []string) (interface{}, []string, error) { - nibbles := t.elements[0].([]byte) - idx, rest := shiftFromPath(p, len(nibbles)) - if len(idx) < len(nibbles) { - return nil, nil, fmt.Errorf("not enough nibbles to traverse this extension") - } - - for _, i := range idx { - if getHexIndex(string(i)) == -1 { - return nil, nil, fmt.Errorf("invalid path element") - } - } - - for i, n := range nibbles { - if string(idx[i]) != fmt.Sprintf("%x", n) { - return nil, nil, fmt.Errorf("no such link in this extension") - } - } - - return &node.Link{Cid: t.elements[1].(cid.Cid)}, rest, nil -} - -func (t *TrieNode) resolveTrieNodeLeaf(p []string) (interface{}, []string, error) { - nibbles := t.elements[0].([]byte) - - if len(nibbles) != 0 { - idx, rest := shiftFromPath(p, len(nibbles)) - if len(idx) < len(nibbles) { - return nil, nil, fmt.Errorf("not enough nibbles to traverse this leaf") - } - - for _, i := range idx { - if getHexIndex(string(i)) == -1 { - return nil, nil, fmt.Errorf("invalid path element") - } - } - - for i, n := range nibbles { - if string(idx[i]) != fmt.Sprintf("%x", n) { - return nil, nil, fmt.Errorf("no such link in this extension") - } - } - - p = rest - } - - link, ok := t.elements[1].(node.Node) - if !ok { - return nil, nil, fmt.Errorf("leaf children is not an IPLD node") - } - - return link.Resolve(p) -} - -func (t *TrieNode) resolveTrieNodeBranch(p []string) (interface{}, []string, error) { - idx, rest := shiftFromPath(p, 1) - hidx := getHexIndex(idx) - if hidx == -1 { - return nil, nil, fmt.Errorf("incorrect path") - } - - child := t.elements[hidx] - if child != nil { - return &node.Link{Cid: child.(cid.Cid)}, rest, nil - } - return nil, nil, fmt.Errorf("no such link in this branch") -} - -// shiftFromPath extracts from a given path (as a slice of strings) -// the given number of elements as a single string, returning whatever -// it has not taken. -// -// Examples: -// ["0", "a", "something"] and 1 -> "0" and ["a", "something"] -// ["ab", "c", "d", "1"] and 2 -> "ab" and ["c", "d", "1"] -// ["abc", "d", "1"] and 2 -> "ab" and ["c", "d", "1"] -func shiftFromPath(p []string, i int) (string, []string) { - var ( - out string - rest []string - ) - - for _, pe := range p { - re := "" - for _, c := range pe { - if len(out) < i { - out += string(c) - } else { - re += string(c) - } - } - - if len(out) == i && re != "" { - rest = append(rest, re) - } - } - - return out, rest -} - -// getHexIndex returns to you the integer 0 - 15 equivalent to your -// string character if applicable, or -1 otherwise. -func getHexIndex(s string) int { - if len(s) != 1 { - return -1 - } - - c := byte(s[0]) - switch { - case '0' <= c && c <= '9': - return int(c - '0') - case 'a' <= c && c <= 'f': - return int(c - 'a' + 10) - } - - return -1 -} diff --git a/pkg/ipfs/models.go b/pkg/ipfs/models.go deleted file mode 100644 index eb0312be..00000000 --- a/pkg/ipfs/models.go +++ /dev/null @@ -1,22 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package ipfs - -type BlockModel struct { - CID string `db:"key"` - Data []byte `db:"data"` -} diff --git a/pkg/node/node.go b/pkg/node/node.go deleted file mode 100644 index 66f91d6d..00000000 --- a/pkg/node/node.go +++ /dev/null @@ -1,25 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package node - -type Node struct { - GenesisBlock string - NetworkID string - ChainID uint64 - ID string - ClientName string -} diff --git a/pkg/postgres/errors.go b/pkg/postgres/errors.go deleted file mode 100644 index eac04b8c..00000000 --- a/pkg/postgres/errors.go +++ /dev/null @@ -1,37 +0,0 @@ -package postgres - -import ( - "fmt" -) - -const ( - BeginTransactionFailedMsg = "failed to begin transaction" - DbConnectionFailedMsg = "db connection failed" - DeleteQueryFailedMsg = "delete query failed" - InsertQueryFailedMsg = "insert query failed" - SettingNodeFailedMsg = "unable to set db node" -) - -func ErrBeginTransactionFailed(beginErr error) error { - return formatError(BeginTransactionFailedMsg, beginErr.Error()) -} - -func ErrDBConnectionFailed(connectErr error) error { - return formatError(DbConnectionFailedMsg, connectErr.Error()) -} - -func ErrDBDeleteFailed(deleteErr error) error { - return formatError(DeleteQueryFailedMsg, deleteErr.Error()) -} - -func ErrDBInsertFailed(insertErr error) error { - return formatError(InsertQueryFailedMsg, insertErr.Error()) -} - -func ErrUnableToSetNode(setErr error) error { - return formatError(SettingNodeFailedMsg, setErr.Error()) -} - -func formatError(msg, err string) error { - return fmt.Errorf("%s: %s", msg, err) -} diff --git a/pkg/postgres/postgres.go b/pkg/postgres/postgres.go deleted file mode 100644 index 1e257568..00000000 --- a/pkg/postgres/postgres.go +++ /dev/null @@ -1,76 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
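// [editor's example, not part of the original patch] A caller-side sketch of the postgres
// package defined just below: NewDB opens the sqlx connection from a config.Database,
// applies the pool limits, and upserts the node row so DB.NodeID is populated. Assumes
// the caller imports fmt plus the config, node and postgres packages; the node values
// mirror those used in the package tests.
func exampleNewDB(dbConf config.Database) error {
    n := node.Node{GenesisBlock: "GENESIS", NetworkID: "1", ID: "x123", ClientName: "geth"}
    db, err := postgres.NewDB(dbConf, n)
    if err != nil {
        return err // wrapped as "db connection failed" or "unable to set db node"
    }
    fmt.Println("node row id:", db.NodeID)
    return nil
}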
- -package postgres - -import ( - "time" - - "github.com/jmoiron/sqlx" - _ "github.com/lib/pq" //postgres driver - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/config" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/node" -) - -type DB struct { - *sqlx.DB - Node node.Node - NodeID int64 -} - -func NewDB(databaseConfig config.Database, node node.Node) (*DB, error) { - connectString := config.DbConnectionString(databaseConfig) - db, connectErr := sqlx.Connect("postgres", connectString) - if connectErr != nil { - return &DB{}, ErrDBConnectionFailed(connectErr) - } - if databaseConfig.MaxOpen > 0 { - db.SetMaxOpenConns(databaseConfig.MaxOpen) - } - if databaseConfig.MaxIdle > 0 { - db.SetMaxIdleConns(databaseConfig.MaxIdle) - } - if databaseConfig.MaxLifetime > 0 { - lifetime := time.Duration(databaseConfig.MaxLifetime) * time.Second - db.SetConnMaxLifetime(lifetime) - } - pg := DB{DB: db, Node: node} - nodeErr := pg.CreateNode(&node) - if nodeErr != nil { - return &DB{}, ErrUnableToSetNode(nodeErr) - } - return &pg, nil -} - -func (db *DB) CreateNode(node *node.Node) error { - var nodeID int64 - err := db.QueryRow( - `INSERT INTO nodes (genesis_block, network_id, node_id, client_name) - VALUES ($1, $2, $3, $4) - ON CONFLICT (genesis_block, network_id, node_id) - DO UPDATE - SET genesis_block = $1, - network_id = $2, - node_id = $3, - client_name = $4 - RETURNING id`, - node.GenesisBlock, node.NetworkID, node.ID, node.ClientName).Scan(&nodeID) - if err != nil { - return ErrUnableToSetNode(err) - } - db.NodeID = nodeID - return nil -} diff --git a/pkg/postgres/postgres_suite_test.go b/pkg/postgres/postgres_suite_test.go deleted file mode 100644 index 8a991b39..00000000 --- a/pkg/postgres/postgres_suite_test.go +++ /dev/null @@ -1,36 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package postgres_test - -import ( - "io/ioutil" - "testing" - - log "github.com/sirupsen/logrus" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" -) - -func init() { - log.SetOutput(ioutil.Discard) -} - -func TestPostgres(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Postgres Suite") -} diff --git a/pkg/postgres/postgres_test.go b/pkg/postgres/postgres_test.go deleted file mode 100644 index f4faa090..00000000 --- a/pkg/postgres/postgres_test.go +++ /dev/null @@ -1,104 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package postgres_test - -import ( - "fmt" - "strings" - - "math/big" - - "github.com/jmoiron/sqlx" - _ "github.com/lib/pq" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/config" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/node" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/test_config" -) - -var _ = Describe("Postgres DB", func() { - var sqlxdb *sqlx.DB - - It("connects to the database", func() { - var err error - pgConfig := config.DbConnectionString(test_config.DBConfig) - - sqlxdb, err = sqlx.Connect("postgres", pgConfig) - - Expect(err).Should(BeNil()) - Expect(sqlxdb).ShouldNot(BeNil()) - }) - - It("serializes big.Int to db", func() { - // postgres driver doesn't support go big.Int type - // various casts in golang uint64, int64, overflow for - // transaction value (in wei) even though - // postgres numeric can handle an arbitrary - // sized int, so use string representation of big.Int - // and cast on insert - - pgConnectString := config.DbConnectionString(test_config.DBConfig) - db, err := sqlx.Connect("postgres", pgConnectString) - Expect(err).NotTo(HaveOccurred()) - - bi := new(big.Int) - bi.SetString("34940183920000000000", 10) - Expect(bi.String()).To(Equal("34940183920000000000")) - - defer db.Exec(`DROP TABLE IF EXISTS example`) - _, err = db.Exec("CREATE TABLE example ( id INTEGER, data NUMERIC )") - Expect(err).ToNot(HaveOccurred()) - - sqlStatement := ` - INSERT INTO example (id, data) - VALUES (1, cast($1 AS NUMERIC))` - _, err = db.Exec(sqlStatement, bi.String()) - Expect(err).ToNot(HaveOccurred()) - - var data string - err = db.QueryRow(`SELECT data FROM example WHERE id = 1`).Scan(&data) - Expect(err).ToNot(HaveOccurred()) - - Expect(bi.String()).To(Equal(data)) - actual := new(big.Int) - actual.SetString(data, 10) - Expect(actual).To(Equal(bi)) - }) - - It("throws error when can't connect to the database", func() { - invalidDatabase := config.Database{} - node := node.Node{GenesisBlock: "GENESIS", NetworkID: "1", ID: "x123", ClientName: "geth"} - - _, err := postgres.NewDB(invalidDatabase, node) - - Expect(err).To(HaveOccurred()) - Expect(err.Error()).To(ContainSubstring(postgres.DbConnectionFailedMsg)) - }) - - It("throws error when can't create node", func() { - badHash := fmt.Sprintf("x %s", strings.Repeat("1", 100)) - node := node.Node{GenesisBlock: badHash, NetworkID: "1", ID: "x123", ClientName: "geth"} - - _, err := postgres.NewDB(test_config.DBConfig, node) - - Expect(err).To(HaveOccurred()) - Expect(err.Error()).To(ContainSubstring(postgres.SettingNodeFailedMsg)) - }) -}) diff --git a/pkg/resync/config.go b/pkg/resync/config.go deleted file mode 100644 index c68ad7f6..00000000 --- a/pkg/resync/config.go +++ /dev/null @@ -1,128 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package resync - -import ( - "fmt" - "time" - - "github.com/spf13/viper" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/config" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/node" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" - "github.com/vulcanize/ipfs-blockchain-watcher/utils" -) - -// Env variables -const ( - RESYNC_CHAIN = "RESYNC_CHAIN" - RESYNC_START = "RESYNC_START" - RESYNC_STOP = "RESYNC_STOP" - RESYNC_BATCH_SIZE = "RESYNC_BATCH_SIZE" - RESYNC_BATCH_NUMBER = "RESYNC_BATCH_NUMBER" - RESYNC_CLEAR_OLD_CACHE = "RESYNC_CLEAR_OLD_CACHE" - RESYNC_TYPE = "RESYNC_TYPE" - RESYNC_RESET_VALIDATION = "RESYNC_RESET_VALIDATION" -) - -// Config holds the parameters needed to perform a resync -type Config struct { - Chain shared.ChainType // The type of resync to perform - ResyncType shared.DataType // The type of data to resync - ClearOldCache bool // Resync will first clear all the data within the range - ResetValidation bool // If true, resync will reset the validation level to 0 for the given range - - // DB info - DB *postgres.DB - DBConfig config.Database - - HTTPClient interface{} // Note this client is expected to support the retrieval of the specified data type(s) - NodeInfo node.Node // Info for the associated node - Ranges [][2]uint64 // The block height ranges to resync - BatchSize uint64 // BatchSize for the resync http calls (client has to support batch sizing) - Timeout time.Duration // HTTP connection timeout in seconds - BatchNumber uint64 -} - -// NewConfig fills and returns a resync config from toml parameters -func NewConfig() (*Config, error) { - c := new(Config) - var err error - - viper.BindEnv("resync.start", RESYNC_START) - viper.BindEnv("resync.stop", RESYNC_STOP) - viper.BindEnv("resync.clearOldCache", RESYNC_CLEAR_OLD_CACHE) - viper.BindEnv("resync.type", RESYNC_TYPE) - viper.BindEnv("resync.chain", RESYNC_CHAIN) - viper.BindEnv("ethereum.httpPath", shared.ETH_HTTP_PATH) - viper.BindEnv("bitcoin.httpPath", shared.BTC_HTTP_PATH) - viper.BindEnv("resync.batchSize", RESYNC_BATCH_SIZE) - viper.BindEnv("resync.batchNumber", RESYNC_BATCH_NUMBER) - viper.BindEnv("resync.resetValidation", RESYNC_RESET_VALIDATION) - viper.BindEnv("resync.timeout", shared.HTTP_TIMEOUT) - - timeout := viper.GetInt("resync.timeout") - if timeout < 5 { - timeout = 5 - } - c.Timeout = time.Second * time.Duration(timeout) - - start := uint64(viper.GetInt64("resync.start")) - stop := uint64(viper.GetInt64("resync.stop")) - c.Ranges = [][2]uint64{{start, stop}} - c.ClearOldCache = viper.GetBool("resync.clearOldCache") - c.ResetValidation = viper.GetBool("resync.resetValidation") - - resyncType := viper.GetString("resync.type") - c.ResyncType, err = shared.GenerateDataTypeFromString(resyncType) - if err != nil { - return nil, err - } - chain := viper.GetString("resync.chain") - c.Chain, err = shared.NewChainType(chain) - if err != nil { - return nil, err - } - if ok, err := shared.SupportedDataType(c.ResyncType, c.Chain); !ok { - if err != nil { - return nil, err - } - return nil, fmt.Errorf("chain type %s does not support data type %s", c.Chain.String(), c.ResyncType.String()) - } - - switch c.Chain { - case shared.Ethereum: - ethHTTP := viper.GetString("ethereum.httpPath") - c.NodeInfo, c.HTTPClient, err = shared.GetEthNodeAndClient(fmt.Sprintf("http://%s", ethHTTP)) - if err != nil { - 
return nil, err - } - case shared.Bitcoin: - btcHTTP := viper.GetString("bitcoin.httpPath") - c.NodeInfo, c.HTTPClient = shared.GetBtcNodeAndClient(btcHTTP) - } - - c.DBConfig.Init() - db := utils.LoadPostgres(c.DBConfig, c.NodeInfo) - c.DB = &db - - c.BatchSize = uint64(viper.GetInt64("resync.batchSize")) - c.BatchNumber = uint64(viper.GetInt64("resync.batchNumber")) - return c, nil -} diff --git a/pkg/resync/service.go b/pkg/resync/service.go deleted file mode 100644 index 1dc39753..00000000 --- a/pkg/resync/service.go +++ /dev/null @@ -1,174 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package resync - -import ( - "fmt" - - "github.com/sirupsen/logrus" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/builders" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" - "github.com/vulcanize/ipfs-blockchain-watcher/utils" -) - -type Resync interface { - Resync() error -} - -type Service struct { - // Interface for converting payloads into IPLD object payloads - Converter shared.PayloadConverter - // Interface for publishing the IPLD payloads to IPFS - Publisher shared.IPLDPublisher - // Interface for searching and retrieving CIDs from Postgres index - Retriever shared.CIDRetriever - // Interface for fetching payloads over at historical blocks; over http - Fetcher shared.PayloadFetcher - // Interface for cleaning out data before resyncing (if clearOldCache is on) - Cleaner shared.Cleaner - // Size of batch fetches - BatchSize uint64 - // Number of goroutines - BatchNumber int64 - // Channel for receiving quit signal - quitChan chan bool - // Chain type - chain shared.ChainType - // Resync data type - data shared.DataType - // Resync ranges - ranges [][2]uint64 - // Flag to turn on or off old cache destruction - clearOldCache bool - // Flag to turn on or off validation level reset - resetValidation bool -} - -// NewResyncService creates and returns a resync service from the provided settings -func NewResyncService(settings *Config) (Resync, error) { - publisher, err := builders.NewIPLDPublisher(settings.Chain, settings.DB) - if err != nil { - return nil, err - } - converter, err := builders.NewPayloadConverter(settings.Chain, settings.NodeInfo.ChainID) - if err != nil { - return nil, err - } - retriever, err := builders.NewCIDRetriever(settings.Chain, settings.DB) - if err != nil { - return nil, err - } - fetcher, err := builders.NewPaylaodFetcher(settings.Chain, settings.HTTPClient, settings.Timeout) - if err != nil { - return nil, err - } - cleaner, err := builders.NewCleaner(settings.Chain, settings.DB) - if err != nil { - return nil, err - } - batchSize := settings.BatchSize - if batchSize == 0 { - batchSize = shared.DefaultMaxBatchSize - } - batchNumber := int64(settings.BatchNumber) - if batchNumber == 0 { - batchNumber = shared.DefaultMaxBatchNumber - } - return &Service{ - Converter: converter, - Publisher: publisher, - 
Retriever: retriever, - Fetcher: fetcher, - Cleaner: cleaner, - BatchSize: batchSize, - BatchNumber: int64(batchNumber), - quitChan: make(chan bool), - chain: settings.Chain, - ranges: settings.Ranges, - data: settings.ResyncType, - clearOldCache: settings.ClearOldCache, - resetValidation: settings.ResetValidation, - }, nil -} - -func (rs *Service) Resync() error { - if rs.resetValidation { - logrus.Infof("resetting validation level") - if err := rs.Cleaner.ResetValidation(rs.ranges); err != nil { - return fmt.Errorf("validation reset failed: %v", err) - } - } - if rs.clearOldCache { - logrus.Infof("cleaning out old data from Postgres") - if err := rs.Cleaner.Clean(rs.ranges, rs.data); err != nil { - return fmt.Errorf("%s %s data resync cleaning error: %v", rs.chain.String(), rs.data.String(), err) - } - } - // spin up worker goroutines - heightsChan := make(chan []uint64) - for i := 1; i <= int(rs.BatchNumber); i++ { - go rs.resync(i, heightsChan) - } - for _, rng := range rs.ranges { - if rng[1] < rng[0] { - logrus.Errorf("%s resync range ending block number needs to be greater than the starting block number", rs.chain.String()) - continue - } - logrus.Infof("resyncing %s data from %d to %d", rs.chain.String(), rng[0], rng[1]) - // break the range up into bins of smaller ranges - blockRangeBins, err := utils.GetBlockHeightBins(rng[0], rng[1], rs.BatchSize) - if err != nil { - return err - } - for _, heights := range blockRangeBins { - heightsChan <- heights - } - } - // send a quit signal to each worker - // this blocks until each worker has finished its current task and can receive from the quit channel - for i := 1; i <= int(rs.BatchNumber); i++ { - rs.quitChan <- true - } - return nil -} - -func (rs *Service) resync(id int, heightChan chan []uint64) { - for { - select { - case heights := <-heightChan: - logrus.Debugf("%s resync worker %d processing section from %d to %d", rs.chain.String(), id, heights[0], heights[len(heights)-1]) - payloads, err := rs.Fetcher.FetchAt(heights) - if err != nil { - logrus.Errorf("%s resync worker %d fetcher error: %s", rs.chain.String(), id, err.Error()) - } - for _, payload := range payloads { - ipldPayload, err := rs.Converter.Convert(payload) - if err != nil { - logrus.Errorf("%s resync worker %d converter error: %s", rs.chain.String(), id, err.Error()) - } - if err := rs.Publisher.Publish(ipldPayload); err != nil { - logrus.Errorf("%s resync worker %d publisher error: %s", rs.chain.String(), id, err.Error()) - } - } - logrus.Infof("%s resync worker %d finished section from %d to %d", rs.chain.String(), id, heights[0], heights[len(heights)-1]) - case <-rs.quitChan: - logrus.Infof("%s resync worker %d goroutine shutting down", rs.chain.String(), id) - return - } - } -} diff --git a/pkg/shared/data_type.go b/pkg/shared/data_type.go deleted file mode 100644 index d62694df..00000000 --- a/pkg/shared/data_type.go +++ /dev/null @@ -1,144 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package shared - -import ( - "fmt" - "strings" -) - -// DataType is an enum to loosely represent type of chain data -type DataType int - -const ( - UnknownDataType DataType = iota - 1 - Full - Headers - Uncles - Transactions - Receipts - State - Storage -) - -// String() method to resolve ReSyncType enum -func (r DataType) String() string { - switch r { - case Full: - return "full" - case Headers: - return "headers" - case Uncles: - return "uncles" - case Transactions: - return "transactions" - case Receipts: - return "receipts" - case State: - return "state" - case Storage: - return "storage" - default: - return "unknown" - } -} - -// GenerateDataTypeFromString -func GenerateDataTypeFromString(str string) (DataType, error) { - switch strings.ToLower(str) { - case "full", "f": - return Full, nil - case "headers", "header", "h": - return Headers, nil - case "uncles", "u": - return Uncles, nil - case "transactions", "transaction", "trxs", "txs", "trx", "tx", "t": - return Transactions, nil - case "receipts", "receipt", "rcts", "rct", "r": - return Receipts, nil - case "state": - return State, nil - case "storage": - return Storage, nil - default: - return UnknownDataType, fmt.Errorf("unrecognized resync type: %s", str) - } -} - -func SupportedDataType(d DataType, c ChainType) (bool, error) { - switch c { - case Ethereum: - switch d { - case Full: - return true, nil - case Headers: - return true, nil - case Uncles: - return true, nil - case Transactions: - return true, nil - case Receipts: - return true, nil - case State: - return true, nil - case Storage: - return true, nil - default: - return true, nil - } - case Bitcoin: - switch d { - case Full: - return true, nil - case Headers: - return true, nil - case Uncles: - return false, nil - case Transactions: - return true, nil - case Receipts: - return false, nil - case State: - return false, nil - case Storage: - return false, nil - default: - return false, nil - } - case Omni: - switch d { - case Full: - return false, nil - case Headers: - return false, nil - case Uncles: - return false, nil - case Transactions: - return false, nil - case Receipts: - return false, nil - case State: - return false, nil - case Storage: - return false, nil - default: - return false, nil - } - default: - return false, fmt.Errorf("unrecognized chain type %s", c.String()) - } -} diff --git a/pkg/shared/intefaces.go b/pkg/shared/intefaces.go deleted file mode 100644 index 03839b4d..00000000 --- a/pkg/shared/intefaces.go +++ /dev/null @@ -1,83 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package shared - -import ( - "math/big" -) - -// PayloadStreamer streams chain-specific payloads to the provided channel -type PayloadStreamer interface { - Stream(payloadChan chan RawChainData) (ClientSubscription, error) -} - -// PayloadFetcher fetches chain-specific payloads -type PayloadFetcher interface { - FetchAt(blockHeights []uint64) ([]RawChainData, error) -} - -// PayloadConverter converts chain-specific payloads into IPLD payloads for publishing -type PayloadConverter interface { - Convert(payload RawChainData) (ConvertedData, error) -} - -// IPLDPublisher publishes IPLD payloads and returns a CID payload for indexing -type IPLDPublisher interface { - Publish(payload ConvertedData) error -} - -// ResponseFilterer applies a filter to an IPLD payload to return a subscription response packet -type ResponseFilterer interface { - Filter(filter SubscriptionSettings, payload ConvertedData) (response IPLDs, err error) -} - -// CIDRetriever retrieves cids according to a provided filter and returns a CID wrapper -type CIDRetriever interface { - Retrieve(filter SubscriptionSettings, blockNumber int64) ([]CIDsForFetching, bool, error) - RetrieveFirstBlockNumber() (int64, error) - RetrieveLastBlockNumber() (int64, error) - RetrieveGapsInData(validationLevel int) ([]Gap, error) -} - -// IPLDFetcher uses a CID wrapper to fetch an IPLD wrapper -type IPLDFetcher interface { - Fetch(cids CIDsForFetching) (IPLDs, error) -} - -// ClientSubscription is a general interface for chain data subscriptions -type ClientSubscription interface { - Err() <-chan error - Unsubscribe() -} - -// Cleaner is for cleaning out data from the cache within the given ranges -type Cleaner interface { - Clean(rngs [][2]uint64, t DataType) error - ResetValidation(rngs [][2]uint64) error -} - -// SubscriptionSettings is the interface every subscription filter type needs to satisfy, no matter the chain -// Further specifics of the underlying filter type depend on the internal needs of the types -// which satisfy the ResponseFilterer and CIDRetriever interfaces for a specific chain -// The underlying type needs to be rlp serializable -type SubscriptionSettings interface { - StartingBlock() *big.Int - EndingBlock() *big.Int - ChainType() ChainType - HistoricalData() bool - HistoricalDataOnly() bool -} diff --git a/pkg/shared/types.go b/pkg/shared/types.go deleted file mode 100644 index 7dd52f30..00000000 --- a/pkg/shared/types.go +++ /dev/null @@ -1,41 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package shared - -// Very loose interface types for generic processing of different blockchains -// TODO: split different blockchain support into separate repos - -// These types serve as very loose wrappers around a generic underlying interface{} -type RawChainData interface{} - -// The concrete type underneath StreamedIPLDs should not be a pointer -type ConvertedData interface { - Height() int64 -} - -type CIDsForIndexing interface{} - -type CIDsForFetching interface{} - -type IPLDs interface { - Height() int64 -} - -type Gap struct { - Start uint64 - Stop uint64 -} diff --git a/pkg/watch/service_test.go b/pkg/watch/service_test.go deleted file mode 100644 index d2902c88..00000000 --- a/pkg/watch/service_test.go +++ /dev/null @@ -1,72 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package watch_test - -import ( - "sync" - "time" - - "github.com/ethereum/go-ethereum/rpc" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth/mocks" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" - mocks2 "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared/mocks" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/watch" -) - -var _ = Describe("Service", func() { - Describe("Sync", func() { - It("Streams statediff.Payloads, converts them to IPLDPayloads, publishes IPLDPayloads, and indexes CIDPayloads", func() { - wg := new(sync.WaitGroup) - payloadChan := make(chan shared.RawChainData, 1) - quitChan := make(chan bool, 1) - mockPublisher := &mocks.IPLDPublisher{ - ReturnCIDPayload: mocks.MockCIDPayload, - ReturnErr: nil, - } - mockStreamer := &mocks2.PayloadStreamer{ - ReturnSub: &rpc.ClientSubscription{}, - StreamPayloads: []shared.RawChainData{ - mocks.MockStateDiffPayload, - }, - ReturnErr: nil, - } - mockConverter := &mocks.PayloadConverter{ - ReturnIPLDPayload: mocks.MockConvertedPayload, - ReturnErr: nil, - } - processor := &watch.Service{ - Publisher: mockPublisher, - Streamer: mockStreamer, - Converter: mockConverter, - PayloadChan: payloadChan, - QuitChan: quitChan, - WorkerPoolSize: 1, - } - err := processor.Sync(wg, nil) - Expect(err).ToNot(HaveOccurred()) - time.Sleep(2 * time.Second) - close(quitChan) - wg.Wait() - Expect(mockConverter.PassedStatediffPayload).To(Equal(mocks.MockStateDiffPayload)) - Expect(mockPublisher.PassedIPLDPayload).To(Equal(mocks.MockConvertedPayload)) - Expect(mockStreamer.PassedPayloadChan).To(Equal(payloadChan)) - }) - }) -}) diff --git a/pkg/watch/watch_suite_test.go b/pkg/watch/watch_suite_test.go deleted file mode 100644 index 821ae69b..00000000 --- a/pkg/watch/watch_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, 
either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package watch_test - -import ( - "io/ioutil" - "testing" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/sirupsen/logrus" -) - -func TestIPFSWatcher(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "IPFS Watcher Suite Test") -} - -var _ = BeforeSuite(func() { - logrus.SetOutput(ioutil.Discard) -}) diff --git a/temp_rsa.enc b/temp_rsa.enc deleted file mode 100644 index 0f4c243f1daa2b404cbf7415076488ebaadf1cf5..0000000000000000000000000000000000000000 GIT binary patch [1856 bytes of encoded binary data omitted] Date: Mon, 31 Aug 2020 10:47:06 -0500 Subject: [PATCH 02/12] decouple from sync --- pkg/builders/builders.go | 6 +- pkg/client/client.go | 8 +- pkg/eth/api.go | 7 +- pkg/eth/api_test.go | 13 +- pkg/eth/backend.go | 4 +- pkg/eth/cid_retriever.go | 117 ++++++------- pkg/eth/cid_retriever_test.go | 249 +++++++++++++--------------- pkg/eth/eth_suite_test.go | 2 +- pkg/eth/filterer.go | 64 ++++--- pkg/eth/filterer_test.go | 50 +++--- pkg/eth/ipld_fetcher.go | 54 +++--- pkg/eth/ipld_fetcher_test.go | 15 +- pkg/eth/mocks/converter.go | 4 +- pkg/eth/mocks/indexer.go | 4 +- pkg/eth/mocks/publisher.go | 4 +- pkg/eth/subscription_config.go | 27 --- pkg/eth/test_helpers.go | 5 +- pkg/shared/env.go | 55 +----- pkg/shared/mocks/payload_fetcher.go | 2 +- pkg/shared/mocks/retriever.go | 2 +- pkg/shared/mocks/streamer.go | 2 +- pkg/shared/test_helpers.go | 9 +- pkg/watch/api.go | 32 +--- pkg/watch/config.go | 151 +++++------------ pkg/watch/service.go | 249 ++++++++-------------- 25 files changed, 419 insertions(+), 716 deletions(-) diff --git a/pkg/builders/builders.go b/pkg/builders/builders.go index 226dc2ef..f40c526e 100644 --- a/pkg/builders/builders.go +++ b/pkg/builders/builders.go @@ -24,10 +24,10 @@ import ( "github.com/btcsuite/btcd/rpcclient"
"github.com/ethereum/go-ethereum/rpc" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/btc" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" + "github.com/vulcanize/ipld-eth-server/pkg/btc" + "github.com/vulcanize/ipld-eth-server/pkg/eth" + "github.com/vulcanize/ipld-eth-server/pkg/shared" ) // NewResponseFilterer constructs a ResponseFilterer for the provided chain type diff --git a/pkg/client/client.go b/pkg/client/client.go index 5f365525..590e0166 100644 --- a/pkg/client/client.go +++ b/pkg/client/client.go @@ -14,7 +14,7 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see . -// Client is used by watchers to stream chain IPLD data from a vulcanizedb ipfs-blockchain-watcher +// Client is used by watchers to stream chain IPLD data from a vulcanizedb ipld-eth-server package client import ( @@ -22,10 +22,10 @@ import ( "github.com/ethereum/go-ethereum/rpc" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/watch" + "github.com/vulcanize/ipld-eth-server/pkg/watch" ) -// Client is used to subscribe to the ipfs-blockchain-watcher ipld data stream +// Client is used to subscribe to the ipld-eth-server ipld data stream type Client struct { c *rpc.Client } @@ -37,7 +37,7 @@ func NewClient(c *rpc.Client) *Client { } } -// Stream is the main loop for subscribing to iplds from an ipfs-blockchain-watcher server +// Stream is the main loop for subscribing to iplds from an ipld-eth-server server func (c *Client) Stream(payloadChan chan watch.SubscriptionPayload, rlpParams []byte) (*rpc.ClientSubscription, error) { return c.c.Subscribe(context.Background(), "vdb", payloadChan, "stream", rlpParams) } diff --git a/pkg/eth/api.go b/pkg/eth/api.go index 6c9c40bd..491a13a3 100644 --- a/pkg/eth/api.go +++ b/pkg/eth/api.go @@ -20,7 +20,8 @@ import ( "context" "math/big" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" + "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" + "github.com/vulcanize/ipld-eth-server/pkg/shared" "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/common" @@ -127,7 +128,7 @@ func (pea *PublicEthAPI) GetLogs(ctx context.Context, crit ethereum.FilterQuery) } start := startingBlock.Int64() end := endingBlock.Int64() - allRctCIDs := make([]ReceiptModel, 0) + allRctCIDs := make([]eth.ReceiptModel, 0) for i := start; i <= end; i++ { rctCIDs, err := pea.B.Retriever.RetrieveRctCIDs(tx, filter, i, nil, nil) if err != nil { @@ -181,7 +182,7 @@ func (pea *PublicEthAPI) GetBlockByHash(ctx context.Context, hash common.Hash, f } // GetTransactionByHash returns the transaction for the given hash -// eth ipfs-blockchain-watcher cannot currently handle pending/tx_pool txs +// eth ipld-eth-server cannot currently handle pending/tx_pool txs func (pea *PublicEthAPI) GetTransactionByHash(ctx context.Context, hash common.Hash) (*RPCTransaction, error) { // Try to return an already finalized transaction tx, blockHash, blockNumber, index, err := pea.B.GetTransaction(ctx, hash) diff --git a/pkg/eth/api_test.go b/pkg/eth/api_test.go index f41b3b44..e48fff48 100644 --- a/pkg/eth/api_test.go +++ b/pkg/eth/api_test.go @@ -21,19 +21,20 @@ import ( "strconv" "github.com/ethereum/go-ethereum" - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" + "github.com/ethereum/go-ethereum/core/types" 
"github.com/ethereum/go-ethereum/rpc" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" + eth2 "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth/mocks" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" + + "github.com/vulcanize/ipld-eth-server/pkg/eth" + "github.com/vulcanize/ipld-eth-server/pkg/shared" ) var ( @@ -85,7 +86,7 @@ var _ = Describe("API", func() { db *postgres.DB retriever *eth.CIDRetriever fetcher *eth.IPLDFetcher - indexAndPublisher *eth.IPLDPublisher + indexAndPublisher *eth2.IPLDPublisher backend *eth.Backend api *eth.PublicEthAPI ) @@ -95,7 +96,7 @@ var _ = Describe("API", func() { Expect(err).ToNot(HaveOccurred()) retriever = eth.NewCIDRetriever(db) fetcher = eth.NewIPLDFetcher(db) - indexAndPublisher = eth.NewIPLDPublisher(db) + indexAndPublisher = eth2.NewIPLDPublisher(db) backend = ð.Backend{ Retriever: retriever, Fetcher: fetcher, diff --git a/pkg/eth/backend.go b/pkg/eth/backend.go index 082f8dbe..cf972e15 100644 --- a/pkg/eth/backend.go +++ b/pkg/eth/backend.go @@ -32,7 +32,7 @@ import ( "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" + "github.com/vulcanize/ipld-eth-server/pkg/shared" ) var ( @@ -120,7 +120,7 @@ func (b *Backend) GetLogs(ctx context.Context, hash common.Hash) ([][]*types.Log } // BlockByNumber returns the requested canonical block. -// Since the ipfs-blockchain-watcher database can contain forked blocks, it is recommended to fetch BlockByHash as +// Since the ipld-eth-server database can contain forked blocks, it is recommended to fetch BlockByHash as // fetching by number can return non-deterministic results (returns the first block found at that height) func (b *Backend) BlockByNumber(ctx context.Context, blockNumber rpc.BlockNumber) (*types.Block, error) { var err error diff --git a/pkg/eth/cid_retriever.go b/pkg/eth/cid_retriever.go index 14698b70..e62ecffe 100644 --- a/pkg/eth/cid_retriever.go +++ b/pkg/eth/cid_retriever.go @@ -27,11 +27,20 @@ import ( "github.com/lib/pq" log "github.com/sirupsen/logrus" + eth2 "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" - "github.com/vulcanize/ipfs-blockchain-watcher/utils" + + "github.com/vulcanize/ipld-eth-server/pkg/shared" + "github.com/vulcanize/ipld-eth-server/utils" ) +// Retriever interface for substituting mocks in tests +type Retriever interface { + RetrieveFirstBlockNumber() (int64, error) + RetrieveLastBlockNumber() (int64, error) + Retrieve(filter SubscriptionSettings, blockNumber int64) ([]eth2.CIDWrapper, bool, error) +} + // CIDRetriever satisfies the CIDRetriever interface for ethereum type CIDRetriever struct { db *postgres.DB @@ -59,11 +68,7 @@ func (ecr *CIDRetriever) RetrieveLastBlockNumber() (int64, error) { } // Retrieve is used to retrieve all of the CIDs which conform to the passed StreamFilters -func (ecr *CIDRetriever) Retrieve(filter shared.SubscriptionSettings, blockNumber int64) ([]shared.CIDsForFetching, bool, error) { - streamFilter, ok := filter.(*SubscriptionSettings) - if !ok { - return nil, true, fmt.Errorf("eth retriever expected filter type %T got %T", &SubscriptionSettings{}, filter) - } +func (ecr *CIDRetriever) Retrieve(filter 
SubscriptionSettings, blockNumber int64) ([]eth2.CIDWrapper, bool, error) { log.Debug("retrieving cids") // Begin new db tx @@ -88,15 +93,15 @@ func (ecr *CIDRetriever) Retrieve(filter shared.SubscriptionSettings, blockNumbe log.Error("header cid retrieval error") return nil, true, err } - cws := make([]shared.CIDsForFetching, len(headers)) + cws := make([]eth2.CIDWrapper, len(headers)) empty := true for i, header := range headers { - cw := new(CIDWrapper) + cw := new(eth2.CIDWrapper) cw.BlockNumber = big.NewInt(blockNumber) - if !streamFilter.HeaderFilter.Off { + if !filter.HeaderFilter.Off { cw.Header = header empty = false - if streamFilter.HeaderFilter.Uncles { + if filter.HeaderFilter.Uncles { // Retrieve uncle cids for this header id uncleCIDs, err := ecr.RetrieveUncleCIDsByHeaderID(tx, header.ID) if err != nil { @@ -107,8 +112,8 @@ func (ecr *CIDRetriever) Retrieve(filter shared.SubscriptionSettings, blockNumbe } } // Retrieve cached trx CIDs - if !streamFilter.TxFilter.Off { - cw.Transactions, err = ecr.RetrieveTxCIDs(tx, streamFilter.TxFilter, header.ID) + if !filter.TxFilter.Off { + cw.Transactions, err = ecr.RetrieveTxCIDs(tx, filter.TxFilter, header.ID) if err != nil { log.Error("transaction cid retrieval error") return nil, true, err @@ -122,8 +127,8 @@ func (ecr *CIDRetriever) Retrieve(filter shared.SubscriptionSettings, blockNumbe trxIds[j] = tx.ID } // Retrieve cached receipt CIDs - if !streamFilter.ReceiptFilter.Off { - cw.Receipts, err = ecr.RetrieveRctCIDsByHeaderID(tx, streamFilter.ReceiptFilter, header.ID, trxIds) + if !filter.ReceiptFilter.Off { + cw.Receipts, err = ecr.RetrieveRctCIDsByHeaderID(tx, filter.ReceiptFilter, header.ID, trxIds) if err != nil { log.Error("receipt cid retrieval error") return nil, true, err @@ -133,8 +138,8 @@ func (ecr *CIDRetriever) Retrieve(filter shared.SubscriptionSettings, blockNumbe } } // Retrieve cached state CIDs - if !streamFilter.StateFilter.Off { - cw.StateNodes, err = ecr.RetrieveStateCIDs(tx, streamFilter.StateFilter, header.ID) + if !filter.StateFilter.Off { + cw.StateNodes, err = ecr.RetrieveStateCIDs(tx, filter.StateFilter, header.ID) if err != nil { log.Error("state cid retrieval error") return nil, true, err @@ -144,8 +149,8 @@ func (ecr *CIDRetriever) Retrieve(filter shared.SubscriptionSettings, blockNumbe } } // Retrieve cached storage CIDs - if !streamFilter.StorageFilter.Off { - cw.StorageNodes, err = ecr.RetrieveStorageCIDs(tx, streamFilter.StorageFilter, header.ID) + if !filter.StorageFilter.Off { + cw.StorageNodes, err = ecr.RetrieveStorageCIDs(tx, filter.StorageFilter, header.ID) if err != nil { log.Error("storage cid retrieval error") return nil, true, err @@ -154,25 +159,25 @@ func (ecr *CIDRetriever) Retrieve(filter shared.SubscriptionSettings, blockNumbe empty = false } } - cws[i] = cw + cws[i] = *cw } return cws, empty, err } // RetrieveHeaderCIDs retrieves and returns all of the header cids at the provided blockheight -func (ecr *CIDRetriever) RetrieveHeaderCIDs(tx *sqlx.Tx, blockNumber int64) ([]HeaderModel, error) { +func (ecr *CIDRetriever) RetrieveHeaderCIDs(tx *sqlx.Tx, blockNumber int64) ([]eth2.HeaderModel, error) { log.Debug("retrieving header cids for block ", blockNumber) - headers := make([]HeaderModel, 0) + headers := make([]eth2.HeaderModel, 0) pgStr := `SELECT * FROM eth.header_cids WHERE block_number = $1` return headers, tx.Select(&headers, pgStr, blockNumber) } // RetrieveUncleCIDsByHeaderID retrieves and returns all of the uncle cids for the provided header -func (ecr *CIDRetriever) 
RetrieveUncleCIDsByHeaderID(tx *sqlx.Tx, headerID int64) ([]UncleModel, error) { +func (ecr *CIDRetriever) RetrieveUncleCIDsByHeaderID(tx *sqlx.Tx, headerID int64) ([]eth2.UncleModel, error) { log.Debug("retrieving uncle cids for block id ", headerID) - headers := make([]UncleModel, 0) + headers := make([]eth2.UncleModel, 0) pgStr := `SELECT * FROM eth.uncle_cids WHERE header_id = $1` return headers, tx.Select(&headers, pgStr, headerID) @@ -180,10 +185,10 @@ func (ecr *CIDRetriever) RetrieveUncleCIDsByHeaderID(tx *sqlx.Tx, headerID int64 // RetrieveTxCIDs retrieves and returns all of the trx cids at the provided blockheight that conform to the provided filter parameters // also returns the ids for the returned transaction cids -func (ecr *CIDRetriever) RetrieveTxCIDs(tx *sqlx.Tx, txFilter TxFilter, headerID int64) ([]TxModel, error) { +func (ecr *CIDRetriever) RetrieveTxCIDs(tx *sqlx.Tx, txFilter TxFilter, headerID int64) ([]eth2.TxModel, error) { log.Debug("retrieving transaction cids for header id ", headerID) args := make([]interface{}, 0, 3) - results := make([]TxModel, 0) + results := make([]eth2.TxModel, 0) id := 1 pgStr := fmt.Sprintf(`SELECT transaction_cids.id, transaction_cids.header_id, transaction_cids.tx_hash, transaction_cids.cid, transaction_cids.mh_key, @@ -208,7 +213,7 @@ func (ecr *CIDRetriever) RetrieveTxCIDs(tx *sqlx.Tx, txFilter TxFilter, headerID // RetrieveRctCIDsByHeaderID retrieves and returns all of the rct cids at the provided header ID that conform to the provided // filter parameters and correspond to the provided tx ids -func (ecr *CIDRetriever) RetrieveRctCIDsByHeaderID(tx *sqlx.Tx, rctFilter ReceiptFilter, headerID int64, trxIds []int64) ([]ReceiptModel, error) { +func (ecr *CIDRetriever) RetrieveRctCIDsByHeaderID(tx *sqlx.Tx, rctFilter ReceiptFilter, headerID int64, trxIds []int64) ([]eth2.ReceiptModel, error) { log.Debug("retrieving receipt cids for header id ", headerID) args := make([]interface{}, 0, 4) pgStr := `SELECT receipt_cids.id, receipt_cids.tx_id, receipt_cids.cid, receipt_cids.mh_key, @@ -282,13 +287,13 @@ func (ecr *CIDRetriever) RetrieveRctCIDsByHeaderID(tx *sqlx.Tx, rctFilter Receip } } pgStr += ` ORDER BY transaction_cids.index` - receiptCids := make([]ReceiptModel, 0) + receiptCids := make([]eth2.ReceiptModel, 0) return receiptCids, tx.Select(&receiptCids, pgStr, args...) } // RetrieveRctCIDs retrieves and returns all of the rct cids at the provided blockheight or block hash that conform to the provided // filter parameters and correspond to the provided tx ids -func (ecr *CIDRetriever) RetrieveRctCIDs(tx *sqlx.Tx, rctFilter ReceiptFilter, blockNumber int64, blockHash *common.Hash, trxIds []int64) ([]ReceiptModel, error) { +func (ecr *CIDRetriever) RetrieveRctCIDs(tx *sqlx.Tx, rctFilter ReceiptFilter, blockNumber int64, blockHash *common.Hash, trxIds []int64) ([]eth2.ReceiptModel, error) { log.Debug("retrieving receipt cids for block ", blockNumber) args := make([]interface{}, 0, 5) pgStr := `SELECT receipt_cids.id, receipt_cids.tx_id, receipt_cids.cid, receipt_cids.mh_key, @@ -370,7 +375,7 @@ func (ecr *CIDRetriever) RetrieveRctCIDs(tx *sqlx.Tx, rctFilter ReceiptFilter, b } } pgStr += ` ORDER BY transaction_cids.index` - receiptCids := make([]ReceiptModel, 0) + receiptCids := make([]eth2.ReceiptModel, 0) return receiptCids, tx.Select(&receiptCids, pgStr, args...) 
} @@ -384,7 +389,7 @@ func hasTopics(topics [][]string) bool { } // RetrieveStateCIDs retrieves and returns all of the state node cids at the provided header ID that conform to the provided filter parameters -func (ecr *CIDRetriever) RetrieveStateCIDs(tx *sqlx.Tx, stateFilter StateFilter, headerID int64) ([]StateNodeModel, error) { +func (ecr *CIDRetriever) RetrieveStateCIDs(tx *sqlx.Tx, stateFilter StateFilter, headerID int64) ([]eth2.StateNodeModel, error) { log.Debug("retrieving state cids for header id ", headerID) args := make([]interface{}, 0, 2) pgStr := `SELECT state_cids.id, state_cids.header_id, @@ -404,12 +409,12 @@ func (ecr *CIDRetriever) RetrieveStateCIDs(tx *sqlx.Tx, stateFilter StateFilter, if !stateFilter.IntermediateNodes { pgStr += ` AND state_cids.node_type = 2` } - stateNodeCIDs := make([]StateNodeModel, 0) + stateNodeCIDs := make([]eth2.StateNodeModel, 0) return stateNodeCIDs, tx.Select(&stateNodeCIDs, pgStr, args...) } // RetrieveStorageCIDs retrieves and returns all of the storage node cids at the provided header id that conform to the provided filter parameters -func (ecr *CIDRetriever) RetrieveStorageCIDs(tx *sqlx.Tx, storageFilter StorageFilter, headerID int64) ([]StorageNodeWithStateKeyModel, error) { +func (ecr *CIDRetriever) RetrieveStorageCIDs(tx *sqlx.Tx, storageFilter StorageFilter, headerID int64) ([]eth2.StorageNodeWithStateKeyModel, error) { log.Debug("retrieving storage cids for header id ", headerID) args := make([]interface{}, 0, 3) pgStr := `SELECT storage_cids.id, storage_cids.state_id, storage_cids.storage_leaf_key, storage_cids.node_type, @@ -437,23 +442,23 @@ func (ecr *CIDRetriever) RetrieveStorageCIDs(tx *sqlx.Tx, storageFilter StorageF if !storageFilter.IntermediateNodes { pgStr += ` AND storage_cids.node_type = 2` } - storageNodeCIDs := make([]StorageNodeWithStateKeyModel, 0) + storageNodeCIDs := make([]eth2.StorageNodeWithStateKeyModel, 0) return storageNodeCIDs, tx.Select(&storageNodeCIDs, pgStr, args...) 
} // RetrieveGapsInData is used to find the the block numbers at which we are missing data in the db // it finds the union of heights where no data exists and where the times_validated is lower than the validation level -func (ecr *CIDRetriever) RetrieveGapsInData(validationLevel int) ([]shared.Gap, error) { +func (ecr *CIDRetriever) RetrieveGapsInData(validationLevel int) ([]eth2.DBGap, error) { log.Info("searching for gaps in the eth ipfs watcher database") startingBlock, err := ecr.RetrieveFirstBlockNumber() if err != nil { return nil, fmt.Errorf("eth CIDRetriever RetrieveFirstBlockNumber error: %v", err) } - var initialGap []shared.Gap + var initialGap []eth2.DBGap if startingBlock != 0 { stop := uint64(startingBlock - 1) log.Infof("found gap at the beginning of the eth sync from 0 to %d", stop) - initialGap = []shared.Gap{{ + initialGap = []eth2.DBGap{{ Start: 0, Stop: stop, }} @@ -471,9 +476,9 @@ func (ecr *CIDRetriever) RetrieveGapsInData(validationLevel int) ([]shared.Gap, if err := ecr.db.Select(&results, pgStr); err != nil && err != sql.ErrNoRows { return nil, err } - emptyGaps := make([]shared.Gap, len(results)) + emptyGaps := make([]eth2.DBGap, len(results)) for i, res := range results { - emptyGaps[i] = shared.Gap{ + emptyGaps[i] = eth2.DBGap{ Start: res.Start, Stop: res.Stop, } @@ -492,13 +497,13 @@ func (ecr *CIDRetriever) RetrieveGapsInData(validationLevel int) ([]shared.Gap, } // RetrieveBlockByHash returns all of the CIDs needed to compose an entire block, for a given block hash -func (ecr *CIDRetriever) RetrieveBlockByHash(blockHash common.Hash) (HeaderModel, []UncleModel, []TxModel, []ReceiptModel, error) { +func (ecr *CIDRetriever) RetrieveBlockByHash(blockHash common.Hash) (eth2.HeaderModel, []eth2.UncleModel, []eth2.TxModel, []eth2.ReceiptModel, error) { log.Debug("retrieving block cids for block hash ", blockHash.String()) // Begin new db tx tx, err := ecr.db.Beginx() if err != nil { - return HeaderModel{}, nil, nil, nil, err + return eth2.HeaderModel{}, nil, nil, nil, err } defer func() { if p := recover(); p != nil { @@ -514,17 +519,17 @@ func (ecr *CIDRetriever) RetrieveBlockByHash(blockHash common.Hash) (HeaderModel headerCID, err := ecr.RetrieveHeaderCIDByHash(tx, blockHash) if err != nil { log.Error("header cid retrieval error") - return HeaderModel{}, nil, nil, nil, err + return eth2.HeaderModel{}, nil, nil, nil, err } uncleCIDs, err := ecr.RetrieveUncleCIDsByHeaderID(tx, headerCID.ID) if err != nil { log.Error("uncle cid retrieval error") - return HeaderModel{}, nil, nil, nil, err + return eth2.HeaderModel{}, nil, nil, nil, err } txCIDs, err := ecr.RetrieveTxCIDsByHeaderID(tx, headerCID.ID) if err != nil { log.Error("tx cid retrieval error") - return HeaderModel{}, nil, nil, nil, err + return eth2.HeaderModel{}, nil, nil, nil, err } txIDs := make([]int64, len(txCIDs)) for i, txCID := range txCIDs { @@ -538,13 +543,13 @@ func (ecr *CIDRetriever) RetrieveBlockByHash(blockHash common.Hash) (HeaderModel } // RetrieveBlockByNumber returns all of the CIDs needed to compose an entire block, for a given block number -func (ecr *CIDRetriever) RetrieveBlockByNumber(blockNumber int64) (HeaderModel, []UncleModel, []TxModel, []ReceiptModel, error) { +func (ecr *CIDRetriever) RetrieveBlockByNumber(blockNumber int64) (eth2.HeaderModel, []eth2.UncleModel, []eth2.TxModel, []eth2.ReceiptModel, error) { log.Debug("retrieving block cids for block number ", blockNumber) // Begin new db tx tx, err := ecr.db.Beginx() if err != nil { - return HeaderModel{}, nil, nil, nil, err + 
return eth2.HeaderModel{}, nil, nil, nil, err } defer func() { if p := recover(); p != nil { @@ -560,20 +565,20 @@ func (ecr *CIDRetriever) RetrieveBlockByNumber(blockNumber int64) (HeaderModel, headerCID, err := ecr.RetrieveHeaderCIDs(tx, blockNumber) if err != nil { log.Error("header cid retrieval error") - return HeaderModel{}, nil, nil, nil, err + return eth2.HeaderModel{}, nil, nil, nil, err } if len(headerCID) < 1 { - return HeaderModel{}, nil, nil, nil, fmt.Errorf("header cid retrieval error, no header CIDs found at block %d", blockNumber) + return eth2.HeaderModel{}, nil, nil, nil, fmt.Errorf("header cid retrieval error, no header CIDs found at block %d", blockNumber) } uncleCIDs, err := ecr.RetrieveUncleCIDsByHeaderID(tx, headerCID[0].ID) if err != nil { log.Error("uncle cid retrieval error") - return HeaderModel{}, nil, nil, nil, err + return eth2.HeaderModel{}, nil, nil, nil, err } txCIDs, err := ecr.RetrieveTxCIDsByHeaderID(tx, headerCID[0].ID) if err != nil { log.Error("tx cid retrieval error") - return HeaderModel{}, nil, nil, nil, err + return eth2.HeaderModel{}, nil, nil, nil, err } txIDs := make([]int64, len(txCIDs)) for i, txCID := range txCIDs { @@ -587,26 +592,26 @@ func (ecr *CIDRetriever) RetrieveBlockByNumber(blockNumber int64) (HeaderModel, } // RetrieveHeaderCIDByHash returns the header for the given block hash -func (ecr *CIDRetriever) RetrieveHeaderCIDByHash(tx *sqlx.Tx, blockHash common.Hash) (HeaderModel, error) { +func (ecr *CIDRetriever) RetrieveHeaderCIDByHash(tx *sqlx.Tx, blockHash common.Hash) (eth2.HeaderModel, error) { log.Debug("retrieving header cids for block hash ", blockHash.String()) pgStr := `SELECT * FROM eth.header_cids WHERE block_hash = $1` - var headerCID HeaderModel + var headerCID eth2.HeaderModel return headerCID, tx.Get(&headerCID, pgStr, blockHash.String()) } // RetrieveTxCIDsByHeaderID retrieves all tx CIDs for the given header id -func (ecr *CIDRetriever) RetrieveTxCIDsByHeaderID(tx *sqlx.Tx, headerID int64) ([]TxModel, error) { +func (ecr *CIDRetriever) RetrieveTxCIDsByHeaderID(tx *sqlx.Tx, headerID int64) ([]eth2.TxModel, error) { log.Debug("retrieving tx cids for block id ", headerID) pgStr := `SELECT * FROM eth.transaction_cids WHERE header_id = $1 ORDER BY index` - var txCIDs []TxModel + var txCIDs []eth2.TxModel return txCIDs, tx.Select(&txCIDs, pgStr, headerID) } // RetrieveReceiptCIDsByTxIDs retrieves receipt CIDs by their associated tx IDs -func (ecr *CIDRetriever) RetrieveReceiptCIDsByTxIDs(tx *sqlx.Tx, txIDs []int64) ([]ReceiptModel, error) { +func (ecr *CIDRetriever) RetrieveReceiptCIDsByTxIDs(tx *sqlx.Tx, txIDs []int64) ([]eth2.ReceiptModel, error) { log.Debugf("retrieving receipt cids for tx ids %v", txIDs) pgStr := `SELECT receipt_cids.id, receipt_cids.tx_id, receipt_cids.cid, receipt_cids.mh_key, receipt_cids.contract, receipt_cids.contract_hash, receipt_cids.topic0s, receipt_cids.topic1s, @@ -615,6 +620,6 @@ func (ecr *CIDRetriever) RetrieveReceiptCIDsByTxIDs(tx *sqlx.Tx, txIDs []int64) WHERE tx_id = ANY($1::INTEGER[]) AND receipt_cids.tx_id = transaction_cids.id ORDER BY transaction_cids.index` - var rctCIDs []ReceiptModel + var rctCIDs []eth2.ReceiptModel return rctCIDs, tx.Select(&rctCIDs, pgStr, pq.Array(txIDs)) } diff --git a/pkg/eth/cid_retriever_test.go b/pkg/eth/cid_retriever_test.go index 20e2abc0..24ef1e7f 100644 --- a/pkg/eth/cid_retriever_test.go +++ b/pkg/eth/cid_retriever_test.go @@ -19,22 +19,21 @@ package eth_test import ( "math/big" - "github.com/ethereum/go-ethereum/core/types" - 
"github.com/ethereum/go-ethereum/common" - + "github.com/ethereum/go-ethereum/core/types" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" eth2 "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth/mocks" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" + + "github.com/vulcanize/ipld-eth-server/pkg/eth" + "github.com/vulcanize/ipld-eth-server/pkg/shared" ) var ( - openFilter = ð.SubscriptionSettings{ + openFilter = eth.SubscriptionSettings{ Start: big.NewInt(0), End: big.NewInt(1), HeaderFilter: eth.HeaderFilter{}, @@ -43,7 +42,7 @@ var ( StateFilter: eth.StateFilter{}, StorageFilter: eth.StorageFilter{}, } - rctAddressFilter = ð.SubscriptionSettings{ + rctAddressFilter = eth.SubscriptionSettings{ Start: big.NewInt(0), End: big.NewInt(1), HeaderFilter: eth.HeaderFilter{ @@ -62,7 +61,7 @@ var ( Off: true, }, } - rctTopicsFilter = ð.SubscriptionSettings{ + rctTopicsFilter = eth.SubscriptionSettings{ Start: big.NewInt(0), End: big.NewInt(1), HeaderFilter: eth.HeaderFilter{ @@ -81,7 +80,7 @@ var ( Off: true, }, } - rctTopicsAndAddressFilter = ð.SubscriptionSettings{ + rctTopicsAndAddressFilter = eth.SubscriptionSettings{ Start: big.NewInt(0), End: big.NewInt(1), HeaderFilter: eth.HeaderFilter{ @@ -104,7 +103,7 @@ var ( Off: true, }, } - rctTopicsAndAddressFilterFail = ð.SubscriptionSettings{ + rctTopicsAndAddressFilterFail = eth.SubscriptionSettings{ Start: big.NewInt(0), End: big.NewInt(1), HeaderFilter: eth.HeaderFilter{ @@ -127,7 +126,7 @@ var ( Off: true, }, } - rctAddressesAndTopicFilter = ð.SubscriptionSettings{ + rctAddressesAndTopicFilter = eth.SubscriptionSettings{ Start: big.NewInt(0), End: big.NewInt(1), HeaderFilter: eth.HeaderFilter{ @@ -147,7 +146,7 @@ var ( Off: true, }, } - rctsForAllCollectedTrxs = ð.SubscriptionSettings{ + rctsForAllCollectedTrxs = eth.SubscriptionSettings{ Start: big.NewInt(0), End: big.NewInt(1), HeaderFilter: eth.HeaderFilter{ @@ -166,7 +165,7 @@ var ( Off: true, }, } - rctsForSelectCollectedTrxs = ð.SubscriptionSettings{ + rctsForSelectCollectedTrxs = eth.SubscriptionSettings{ Start: big.NewInt(0), End: big.NewInt(1), HeaderFilter: eth.HeaderFilter{ @@ -187,7 +186,7 @@ var ( Off: true, }, } - stateFilter = ð.SubscriptionSettings{ + stateFilter = eth.SubscriptionSettings{ Start: big.NewInt(0), End: big.NewInt(1), HeaderFilter: eth.HeaderFilter{ @@ -212,14 +211,14 @@ var _ = Describe("Retriever", func() { var ( db *postgres.DB repo *eth2.IPLDPublisher - retriever *eth2.CIDRetriever + retriever *eth.CIDRetriever ) BeforeEach(func() { var err error db, err = shared.SetupDB() Expect(err).ToNot(HaveOccurred()) repo = eth2.NewIPLDPublisher(db) - retriever = eth2.NewCIDRetriever(db) + retriever = eth.NewCIDRetriever(db) }) AfterEach(func() { eth.TearDownDB(db) @@ -235,23 +234,21 @@ var _ = Describe("Retriever", func() { Expect(err).ToNot(HaveOccurred()) Expect(empty).ToNot(BeTrue()) Expect(len(cids)).To(Equal(1)) - cidWrapper, ok := cids[0].(*eth.CIDWrapper) - Expect(ok).To(BeTrue()) - Expect(cidWrapper.BlockNumber).To(Equal(mocks.MockCIDWrapper.BlockNumber)) + Expect(cids[0].BlockNumber).To(Equal(mocks.MockCIDWrapper.BlockNumber)) expectedHeaderCID := mocks.MockCIDWrapper.Header - expectedHeaderCID.ID = cidWrapper.Header.ID - expectedHeaderCID.NodeID = cidWrapper.Header.NodeID - Expect(cidWrapper.Header).To(Equal(expectedHeaderCID)) - 
Expect(len(cidWrapper.Transactions)).To(Equal(3)) - Expect(eth.TxModelsContainsCID(cidWrapper.Transactions, mocks.MockCIDWrapper.Transactions[0].CID)).To(BeTrue()) - Expect(eth.TxModelsContainsCID(cidWrapper.Transactions, mocks.MockCIDWrapper.Transactions[1].CID)).To(BeTrue()) - Expect(eth.TxModelsContainsCID(cidWrapper.Transactions, mocks.MockCIDWrapper.Transactions[2].CID)).To(BeTrue()) - Expect(len(cidWrapper.Receipts)).To(Equal(3)) - Expect(eth.ReceiptModelsContainsCID(cidWrapper.Receipts, mocks.MockCIDWrapper.Receipts[0].CID)).To(BeTrue()) - Expect(eth.ReceiptModelsContainsCID(cidWrapper.Receipts, mocks.MockCIDWrapper.Receipts[1].CID)).To(BeTrue()) - Expect(eth.ReceiptModelsContainsCID(cidWrapper.Receipts, mocks.MockCIDWrapper.Receipts[2].CID)).To(BeTrue()) - Expect(len(cidWrapper.StateNodes)).To(Equal(2)) - for _, stateNode := range cidWrapper.StateNodes { + expectedHeaderCID.ID = cids[0].Header.ID + expectedHeaderCID.NodeID = cids[0].Header.NodeID + Expect(cids[0].Header).To(Equal(expectedHeaderCID)) + Expect(len(cids[0].Transactions)).To(Equal(3)) + Expect(eth.TxModelsContainsCID(cids[0].Transactions, mocks.MockCIDWrapper.Transactions[0].CID)).To(BeTrue()) + Expect(eth.TxModelsContainsCID(cids[0].Transactions, mocks.MockCIDWrapper.Transactions[1].CID)).To(BeTrue()) + Expect(eth.TxModelsContainsCID(cids[0].Transactions, mocks.MockCIDWrapper.Transactions[2].CID)).To(BeTrue()) + Expect(len(cids[0].Receipts)).To(Equal(3)) + Expect(eth.ReceiptModelsContainsCID(cids[0].Receipts, mocks.MockCIDWrapper.Receipts[0].CID)).To(BeTrue()) + Expect(eth.ReceiptModelsContainsCID(cids[0].Receipts, mocks.MockCIDWrapper.Receipts[1].CID)).To(BeTrue()) + Expect(eth.ReceiptModelsContainsCID(cids[0].Receipts, mocks.MockCIDWrapper.Receipts[2].CID)).To(BeTrue()) + Expect(len(cids[0].StateNodes)).To(Equal(2)) + for _, stateNode := range cids[0].StateNodes { if stateNode.CID == mocks.State1CID.String() { Expect(stateNode.StateKey).To(Equal(common.BytesToHash(mocks.ContractLeafKey).Hex())) Expect(stateNode.NodeType).To(Equal(2)) @@ -263,11 +260,11 @@ var _ = Describe("Retriever", func() { Expect(stateNode.Path).To(Equal([]byte{'\x0c'})) } } - Expect(len(cidWrapper.StorageNodes)).To(Equal(1)) + Expect(len(cids[0].StorageNodes)).To(Equal(1)) expectedStorageNodeCIDs := mocks.MockCIDWrapper.StorageNodes - expectedStorageNodeCIDs[0].ID = cidWrapper.StorageNodes[0].ID - expectedStorageNodeCIDs[0].StateID = cidWrapper.StorageNodes[0].StateID - Expect(cidWrapper.StorageNodes).To(Equal(expectedStorageNodeCIDs)) + expectedStorageNodeCIDs[0].ID = cids[0].StorageNodes[0].ID + expectedStorageNodeCIDs[0].StateID = cids[0].StorageNodes[0].StateID + Expect(cids[0].StorageNodes).To(Equal(expectedStorageNodeCIDs)) }) It("Applies filters from the provided config.Subscription", func() { @@ -275,125 +272,111 @@ var _ = Describe("Retriever", func() { Expect(err).ToNot(HaveOccurred()) Expect(empty).ToNot(BeTrue()) Expect(len(cids1)).To(Equal(1)) - cidWrapper1, ok := cids1[0].(*eth.CIDWrapper) - Expect(ok).To(BeTrue()) - Expect(cidWrapper1.BlockNumber).To(Equal(mocks.MockCIDWrapper.BlockNumber)) - Expect(cidWrapper1.Header).To(Equal(eth.HeaderModel{})) - Expect(len(cidWrapper1.Transactions)).To(Equal(0)) - Expect(len(cidWrapper1.StateNodes)).To(Equal(0)) - Expect(len(cidWrapper1.StorageNodes)).To(Equal(0)) - Expect(len(cidWrapper1.Receipts)).To(Equal(1)) + Expect(cids1[0].BlockNumber).To(Equal(mocks.MockCIDWrapper.BlockNumber)) + Expect(cids1[0].Header).To(Equal(eth2.HeaderModel{})) + Expect(len(cids1[0].Transactions)).To(Equal(0)) + 
Expect(len(cids1[0].StateNodes)).To(Equal(0)) + Expect(len(cids1[0].StorageNodes)).To(Equal(0)) + Expect(len(cids1[0].Receipts)).To(Equal(1)) expectedReceiptCID := mocks.MockCIDWrapper.Receipts[0] - expectedReceiptCID.ID = cidWrapper1.Receipts[0].ID - expectedReceiptCID.TxID = cidWrapper1.Receipts[0].TxID - Expect(cidWrapper1.Receipts[0]).To(Equal(expectedReceiptCID)) + expectedReceiptCID.ID = cids1[0].Receipts[0].ID + expectedReceiptCID.TxID = cids1[0].Receipts[0].TxID + Expect(cids1[0].Receipts[0]).To(Equal(expectedReceiptCID)) cids2, empty, err := retriever.Retrieve(rctTopicsFilter, 1) Expect(err).ToNot(HaveOccurred()) Expect(empty).ToNot(BeTrue()) Expect(len(cids2)).To(Equal(1)) - cidWrapper2, ok := cids2[0].(*eth.CIDWrapper) - Expect(ok).To(BeTrue()) - Expect(cidWrapper2.BlockNumber).To(Equal(mocks.MockCIDWrapper.BlockNumber)) - Expect(cidWrapper2.Header).To(Equal(eth.HeaderModel{})) - Expect(len(cidWrapper2.Transactions)).To(Equal(0)) - Expect(len(cidWrapper2.StateNodes)).To(Equal(0)) - Expect(len(cidWrapper2.StorageNodes)).To(Equal(0)) - Expect(len(cidWrapper2.Receipts)).To(Equal(1)) + Expect(cids2[0].BlockNumber).To(Equal(mocks.MockCIDWrapper.BlockNumber)) + Expect(cids2[0].Header).To(Equal(eth2.HeaderModel{})) + Expect(len(cids2[0].Transactions)).To(Equal(0)) + Expect(len(cids2[0].StateNodes)).To(Equal(0)) + Expect(len(cids2[0].StorageNodes)).To(Equal(0)) + Expect(len(cids2[0].Receipts)).To(Equal(1)) expectedReceiptCID = mocks.MockCIDWrapper.Receipts[0] - expectedReceiptCID.ID = cidWrapper2.Receipts[0].ID - expectedReceiptCID.TxID = cidWrapper2.Receipts[0].TxID - Expect(cidWrapper2.Receipts[0]).To(Equal(expectedReceiptCID)) + expectedReceiptCID.ID = cids2[0].Receipts[0].ID + expectedReceiptCID.TxID = cids2[0].Receipts[0].TxID + Expect(cids2[0].Receipts[0]).To(Equal(expectedReceiptCID)) cids3, empty, err := retriever.Retrieve(rctTopicsAndAddressFilter, 1) Expect(err).ToNot(HaveOccurred()) Expect(empty).ToNot(BeTrue()) Expect(len(cids3)).To(Equal(1)) - cidWrapper3, ok := cids3[0].(*eth.CIDWrapper) - Expect(ok).To(BeTrue()) - Expect(cidWrapper3.BlockNumber).To(Equal(mocks.MockCIDWrapper.BlockNumber)) - Expect(cidWrapper3.Header).To(Equal(eth.HeaderModel{})) - Expect(len(cidWrapper3.Transactions)).To(Equal(0)) - Expect(len(cidWrapper3.StateNodes)).To(Equal(0)) - Expect(len(cidWrapper3.StorageNodes)).To(Equal(0)) - Expect(len(cidWrapper3.Receipts)).To(Equal(1)) + Expect(cids3[0].BlockNumber).To(Equal(mocks.MockCIDWrapper.BlockNumber)) + Expect(cids3[0].Header).To(Equal(eth2.HeaderModel{})) + Expect(len(cids3[0].Transactions)).To(Equal(0)) + Expect(len(cids3[0].StateNodes)).To(Equal(0)) + Expect(len(cids3[0].StorageNodes)).To(Equal(0)) + Expect(len(cids3[0].Receipts)).To(Equal(1)) expectedReceiptCID = mocks.MockCIDWrapper.Receipts[0] - expectedReceiptCID.ID = cidWrapper3.Receipts[0].ID - expectedReceiptCID.TxID = cidWrapper3.Receipts[0].TxID - Expect(cidWrapper3.Receipts[0]).To(Equal(expectedReceiptCID)) + expectedReceiptCID.ID = cids3[0].Receipts[0].ID + expectedReceiptCID.TxID = cids3[0].Receipts[0].TxID + Expect(cids3[0].Receipts[0]).To(Equal(expectedReceiptCID)) cids4, empty, err := retriever.Retrieve(rctAddressesAndTopicFilter, 1) Expect(err).ToNot(HaveOccurred()) Expect(empty).ToNot(BeTrue()) Expect(len(cids4)).To(Equal(1)) - cidWrapper4, ok := cids4[0].(*eth.CIDWrapper) - Expect(ok).To(BeTrue()) - Expect(cidWrapper4.BlockNumber).To(Equal(mocks.MockCIDWrapper.BlockNumber)) - Expect(cidWrapper4.Header).To(Equal(eth.HeaderModel{})) - 
Expect(len(cidWrapper4.Transactions)).To(Equal(0)) - Expect(len(cidWrapper4.StateNodes)).To(Equal(0)) - Expect(len(cidWrapper4.StorageNodes)).To(Equal(0)) - Expect(len(cidWrapper4.Receipts)).To(Equal(1)) + Expect(cids4[0].BlockNumber).To(Equal(mocks.MockCIDWrapper.BlockNumber)) + Expect(cids4[0].Header).To(Equal(eth2.HeaderModel{})) + Expect(len(cids4[0].Transactions)).To(Equal(0)) + Expect(len(cids4[0].StateNodes)).To(Equal(0)) + Expect(len(cids4[0].StorageNodes)).To(Equal(0)) + Expect(len(cids4[0].Receipts)).To(Equal(1)) expectedReceiptCID = mocks.MockCIDWrapper.Receipts[1] - expectedReceiptCID.ID = cidWrapper4.Receipts[0].ID - expectedReceiptCID.TxID = cidWrapper4.Receipts[0].TxID - Expect(cidWrapper4.Receipts[0]).To(Equal(expectedReceiptCID)) + expectedReceiptCID.ID = cids4[0].Receipts[0].ID + expectedReceiptCID.TxID = cids4[0].Receipts[0].TxID + Expect(cids4[0].Receipts[0]).To(Equal(expectedReceiptCID)) cids5, empty, err := retriever.Retrieve(rctsForAllCollectedTrxs, 1) Expect(err).ToNot(HaveOccurred()) Expect(empty).ToNot(BeTrue()) Expect(len(cids5)).To(Equal(1)) - cidWrapper5, ok := cids5[0].(*eth.CIDWrapper) - Expect(ok).To(BeTrue()) - Expect(cidWrapper5.BlockNumber).To(Equal(mocks.MockCIDWrapper.BlockNumber)) - Expect(cidWrapper5.Header).To(Equal(eth.HeaderModel{})) - Expect(len(cidWrapper5.Transactions)).To(Equal(3)) - Expect(eth.TxModelsContainsCID(cidWrapper5.Transactions, mocks.Trx1CID.String())).To(BeTrue()) - Expect(eth.TxModelsContainsCID(cidWrapper5.Transactions, mocks.Trx2CID.String())).To(BeTrue()) - Expect(eth.TxModelsContainsCID(cidWrapper5.Transactions, mocks.Trx3CID.String())).To(BeTrue()) - Expect(len(cidWrapper5.StateNodes)).To(Equal(0)) - Expect(len(cidWrapper5.StorageNodes)).To(Equal(0)) - Expect(len(cidWrapper5.Receipts)).To(Equal(3)) - Expect(eth.ReceiptModelsContainsCID(cidWrapper5.Receipts, mocks.Rct1CID.String())).To(BeTrue()) - Expect(eth.ReceiptModelsContainsCID(cidWrapper5.Receipts, mocks.Rct2CID.String())).To(BeTrue()) - Expect(eth.ReceiptModelsContainsCID(cidWrapper5.Receipts, mocks.Rct3CID.String())).To(BeTrue()) + Expect(cids5[0].BlockNumber).To(Equal(mocks.MockCIDWrapper.BlockNumber)) + Expect(cids5[0].Header).To(Equal(eth2.HeaderModel{})) + Expect(len(cids5[0].Transactions)).To(Equal(3)) + Expect(eth.TxModelsContainsCID(cids5[0].Transactions, mocks.Trx1CID.String())).To(BeTrue()) + Expect(eth.TxModelsContainsCID(cids5[0].Transactions, mocks.Trx2CID.String())).To(BeTrue()) + Expect(eth.TxModelsContainsCID(cids5[0].Transactions, mocks.Trx3CID.String())).To(BeTrue()) + Expect(len(cids5[0].StateNodes)).To(Equal(0)) + Expect(len(cids5[0].StorageNodes)).To(Equal(0)) + Expect(len(cids5[0].Receipts)).To(Equal(3)) + Expect(eth.ReceiptModelsContainsCID(cids5[0].Receipts, mocks.Rct1CID.String())).To(BeTrue()) + Expect(eth.ReceiptModelsContainsCID(cids5[0].Receipts, mocks.Rct2CID.String())).To(BeTrue()) + Expect(eth.ReceiptModelsContainsCID(cids5[0].Receipts, mocks.Rct3CID.String())).To(BeTrue()) cids6, empty, err := retriever.Retrieve(rctsForSelectCollectedTrxs, 1) Expect(err).ToNot(HaveOccurred()) Expect(empty).ToNot(BeTrue()) Expect(len(cids6)).To(Equal(1)) - cidWrapper6, ok := cids6[0].(*eth.CIDWrapper) - Expect(ok).To(BeTrue()) - Expect(cidWrapper6.BlockNumber).To(Equal(mocks.MockCIDWrapper.BlockNumber)) - Expect(cidWrapper6.Header).To(Equal(eth.HeaderModel{})) - Expect(len(cidWrapper6.Transactions)).To(Equal(1)) + Expect(cids6[0].BlockNumber).To(Equal(mocks.MockCIDWrapper.BlockNumber)) + Expect(cids6[0].Header).To(Equal(eth2.HeaderModel{})) + 
Expect(len(cids6[0].Transactions)).To(Equal(1)) expectedTxCID := mocks.MockCIDWrapper.Transactions[1] - expectedTxCID.ID = cidWrapper6.Transactions[0].ID - expectedTxCID.HeaderID = cidWrapper6.Transactions[0].HeaderID - Expect(cidWrapper6.Transactions[0]).To(Equal(expectedTxCID)) - Expect(len(cidWrapper6.StateNodes)).To(Equal(0)) - Expect(len(cidWrapper6.StorageNodes)).To(Equal(0)) - Expect(len(cidWrapper6.Receipts)).To(Equal(1)) + expectedTxCID.ID = cids6[0].Transactions[0].ID + expectedTxCID.HeaderID = cids6[0].Transactions[0].HeaderID + Expect(cids6[0].Transactions[0]).To(Equal(expectedTxCID)) + Expect(len(cids6[0].StateNodes)).To(Equal(0)) + Expect(len(cids6[0].StorageNodes)).To(Equal(0)) + Expect(len(cids6[0].Receipts)).To(Equal(1)) expectedReceiptCID = mocks.MockCIDWrapper.Receipts[1] - expectedReceiptCID.ID = cidWrapper6.Receipts[0].ID - expectedReceiptCID.TxID = cidWrapper6.Receipts[0].TxID - Expect(cidWrapper6.Receipts[0]).To(Equal(expectedReceiptCID)) + expectedReceiptCID.ID = cids6[0].Receipts[0].ID + expectedReceiptCID.TxID = cids6[0].Receipts[0].TxID + Expect(cids6[0].Receipts[0]).To(Equal(expectedReceiptCID)) cids7, empty, err := retriever.Retrieve(stateFilter, 1) Expect(err).ToNot(HaveOccurred()) Expect(empty).ToNot(BeTrue()) Expect(len(cids7)).To(Equal(1)) - cidWrapper7, ok := cids7[0].(*eth.CIDWrapper) - Expect(ok).To(BeTrue()) - Expect(cidWrapper7.BlockNumber).To(Equal(mocks.MockCIDWrapper.BlockNumber)) - Expect(cidWrapper7.Header).To(Equal(eth.HeaderModel{})) - Expect(len(cidWrapper7.Transactions)).To(Equal(0)) - Expect(len(cidWrapper7.Receipts)).To(Equal(0)) - Expect(len(cidWrapper7.StorageNodes)).To(Equal(0)) - Expect(len(cidWrapper7.StateNodes)).To(Equal(1)) - Expect(cidWrapper7.StateNodes[0]).To(Equal(eth.StateNodeModel{ - ID: cidWrapper7.StateNodes[0].ID, - HeaderID: cidWrapper7.StateNodes[0].HeaderID, + Expect(cids7[0].BlockNumber).To(Equal(mocks.MockCIDWrapper.BlockNumber)) + Expect(cids7[0].Header).To(Equal(eth2.HeaderModel{})) + Expect(len(cids7[0].Transactions)).To(Equal(0)) + Expect(len(cids7[0].Receipts)).To(Equal(0)) + Expect(len(cids7[0].StorageNodes)).To(Equal(0)) + Expect(len(cids7[0].StateNodes)).To(Equal(1)) + Expect(cids7[0].StateNodes[0]).To(Equal(eth2.StateNodeModel{ + ID: cids7[0].StateNodes[0].ID, + HeaderID: cids7[0].StateNodes[0].HeaderID, NodeType: 2, StateKey: common.BytesToHash(mocks.AccountLeafKey).Hex(), CID: mocks.State2CID.String(), @@ -602,11 +585,11 @@ var _ = Describe("Retriever", func() { gaps, err := retriever.RetrieveGapsInData(1) Expect(err).ToNot(HaveOccurred()) Expect(len(gaps)).To(Equal(5)) - Expect(shared.ListContainsGap(gaps, shared.Gap{Start: 0, Stop: 0})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, shared.Gap{Start: 2, Stop: 4})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, shared.Gap{Start: 6, Stop: 99})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, shared.Gap{Start: 107, Stop: 999})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, shared.Gap{Start: 1001, Stop: 1010100})).To(BeTrue()) + Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 0, Stop: 0})).To(BeTrue()) + Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 2, Stop: 4})).To(BeTrue()) + Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 6, Stop: 99})).To(BeTrue()) + Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 107, Stop: 999})).To(BeTrue()) + Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 1001, Stop: 1010100})).To(BeTrue()) }) It("Finds validation level gaps", func() { @@ -669,21 +652,21 @@ var _ = Describe("Retriever", 
func() { err = repo.Publish(payload14) Expect(err).ToNot(HaveOccurred()) - cleaner := eth.NewCleaner(db) + cleaner := eth2.NewDBCleaner(db) err = cleaner.ResetValidation([][2]uint64{{101, 102}, {104, 104}, {106, 108}}) Expect(err).ToNot(HaveOccurred()) gaps, err := retriever.RetrieveGapsInData(1) Expect(err).ToNot(HaveOccurred()) Expect(len(gaps)).To(Equal(8)) - Expect(shared.ListContainsGap(gaps, shared.Gap{Start: 0, Stop: 0})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, shared.Gap{Start: 2, Stop: 4})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, shared.Gap{Start: 6, Stop: 99})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, shared.Gap{Start: 101, Stop: 102})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, shared.Gap{Start: 104, Stop: 104})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, shared.Gap{Start: 106, Stop: 108})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, shared.Gap{Start: 110, Stop: 999})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, shared.Gap{Start: 1001, Stop: 1010100})).To(BeTrue()) + Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 0, Stop: 0})).To(BeTrue()) + Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 2, Stop: 4})).To(BeTrue()) + Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 6, Stop: 99})).To(BeTrue()) + Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 101, Stop: 102})).To(BeTrue()) + Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 104, Stop: 104})).To(BeTrue()) + Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 106, Stop: 108})).To(BeTrue()) + Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 110, Stop: 999})).To(BeTrue()) + Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 1001, Stop: 1010100})).To(BeTrue()) }) }) }) diff --git a/pkg/eth/eth_suite_test.go b/pkg/eth/eth_suite_test.go index 47adce6b..3be0ebfc 100644 --- a/pkg/eth/eth_suite_test.go +++ b/pkg/eth/eth_suite_test.go @@ -27,7 +27,7 @@ import ( func TestETHWatcher(t *testing.T) { RegisterFailHandler(Fail) - RunSpecs(t, "ETH IPFS Watcher Suite Test") + RunSpecs(t, "eth ipld server eth suite test") } var _ = BeforeSuite(func() { diff --git a/pkg/eth/filterer.go b/pkg/eth/filterer.go index 08acc3da..8f41833c 100644 --- a/pkg/eth/filterer.go +++ b/pkg/eth/filterer.go @@ -18,7 +18,6 @@ package eth import ( "bytes" - "fmt" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core/types" @@ -27,11 +26,16 @@ import ( "github.com/ethereum/go-ethereum/statediff" "github.com/multiformats/go-multihash" + "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs/ipld" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" ) +// Filterer interface for substituing mocks in tests +type Filterer interface { + Filter(filter SubscriptionSettings, payload eth.ConvertedPayload) (*eth.IPLDs, error) +} + // ResponseFilterer satisfies the ResponseFilterer interface for ethereum type ResponseFilterer struct{} @@ -41,42 +45,34 @@ func NewResponseFilterer() *ResponseFilterer { } // Filter is used to filter through eth data to extract and package requested data into a Payload -func (s *ResponseFilterer) Filter(filter shared.SubscriptionSettings, payload shared.ConvertedData) (shared.IPLDs, error) { - ethFilters, ok := filter.(*SubscriptionSettings) - if !ok { - return IPLDs{}, fmt.Errorf("eth filterer expected filter type %T got %T", &SubscriptionSettings{}, filter) - } - ethPayload, ok := payload.(ConvertedPayload) - if 
!ok { - return IPLDs{}, fmt.Errorf("eth filterer expected payload type %T got %T", ConvertedPayload{}, payload) - } - if checkRange(ethFilters.Start.Int64(), ethFilters.End.Int64(), ethPayload.Block.Number().Int64()) { - response := new(IPLDs) - response.TotalDifficulty = ethPayload.TotalDifficulty - if err := s.filterHeaders(ethFilters.HeaderFilter, response, ethPayload); err != nil { - return IPLDs{}, err +func (s *ResponseFilterer) Filter(filter SubscriptionSettings, payload eth.ConvertedPayload) (*eth.IPLDs, error) { + if checkRange(filter.Start.Int64(), filter.End.Int64(), payload.Block.Number().Int64()) { + response := new(eth.IPLDs) + response.TotalDifficulty = payload.TotalDifficulty + if err := s.filterHeaders(filter.HeaderFilter, response, payload); err != nil { + return nil, err } - txHashes, err := s.filterTransactions(ethFilters.TxFilter, response, ethPayload) + txHashes, err := s.filterTransactions(filter.TxFilter, response, payload) if err != nil { - return IPLDs{}, err + return nil, err } var filterTxs []common.Hash - if ethFilters.ReceiptFilter.MatchTxs { + if filter.ReceiptFilter.MatchTxs { filterTxs = txHashes } - if err := s.filerReceipts(ethFilters.ReceiptFilter, response, ethPayload, filterTxs); err != nil { - return IPLDs{}, err + if err := s.filerReceipts(filter.ReceiptFilter, response, payload, filterTxs); err != nil { + return nil, err } - if err := s.filterStateAndStorage(ethFilters.StateFilter, ethFilters.StorageFilter, response, ethPayload); err != nil { - return IPLDs{}, err + if err := s.filterStateAndStorage(filter.StateFilter, filter.StorageFilter, response, payload); err != nil { + return nil, err } - response.BlockNumber = ethPayload.Block.Number() - return *response, nil + response.BlockNumber = payload.Block.Number() + return response, nil } - return IPLDs{}, nil + return nil, nil } -func (s *ResponseFilterer) filterHeaders(headerFilter HeaderFilter, response *IPLDs, payload ConvertedPayload) error { +func (s *ResponseFilterer) filterHeaders(headerFilter HeaderFilter, response *eth.IPLDs, payload eth.ConvertedPayload) error { if !headerFilter.Off { headerRLP, err := rlp.EncodeToBytes(payload.Block.Header()) if err != nil { @@ -118,7 +114,7 @@ func checkRange(start, end, actual int64) bool { return false } -func (s *ResponseFilterer) filterTransactions(trxFilter TxFilter, response *IPLDs, payload ConvertedPayload) ([]common.Hash, error) { +func (s *ResponseFilterer) filterTransactions(trxFilter TxFilter, response *eth.IPLDs, payload eth.ConvertedPayload) ([]common.Hash, error) { var trxHashes []common.Hash if !trxFilter.Off { trxLen := len(payload.Block.Body().Transactions) @@ -166,7 +162,7 @@ func checkTransactionAddrs(wantedSrc, wantedDst []string, actualSrc, actualDst s return false } -func (s *ResponseFilterer) filerReceipts(receiptFilter ReceiptFilter, response *IPLDs, payload ConvertedPayload, trxHashes []common.Hash) error { +func (s *ResponseFilterer) filerReceipts(receiptFilter ReceiptFilter, response *eth.IPLDs, payload eth.ConvertedPayload, trxHashes []common.Hash) error { if !receiptFilter.Off { response.Receipts = make([]ipfs.BlockModel, 0, len(payload.Receipts)) for i, receipt := range payload.Receipts { @@ -256,9 +252,9 @@ func slicesShareString(slice1, slice2 []string) int { } // filterStateAndStorage filters state and storage nodes into the response according to the provided filters -func (s *ResponseFilterer) filterStateAndStorage(stateFilter StateFilter, storageFilter StorageFilter, response *IPLDs, payload ConvertedPayload) error { - 
response.StateNodes = make([]StateNode, 0, len(payload.StateNodes)) - response.StorageNodes = make([]StorageNode, 0) +func (s *ResponseFilterer) filterStateAndStorage(stateFilter StateFilter, storageFilter StorageFilter, response *eth.IPLDs, payload eth.ConvertedPayload) error { + response.StateNodes = make([]eth.StateNode, 0, len(payload.StateNodes)) + response.StorageNodes = make([]eth.StorageNode, 0) stateAddressFilters := make([]common.Hash, len(stateFilter.Addresses)) for i, addr := range stateFilter.Addresses { stateAddressFilters[i] = crypto.Keccak256Hash(common.HexToAddress(addr).Bytes()) @@ -278,7 +274,7 @@ func (s *ResponseFilterer) filterStateAndStorage(stateFilter StateFilter, storag if err != nil { return err } - response.StateNodes = append(response.StateNodes, StateNode{ + response.StateNodes = append(response.StateNodes, eth.StateNode{ StateLeafKey: stateNode.LeafKey, Path: stateNode.Path, IPLD: ipfs.BlockModel{ @@ -296,7 +292,7 @@ func (s *ResponseFilterer) filterStateAndStorage(stateFilter StateFilter, storag if err != nil { return err } - response.StorageNodes = append(response.StorageNodes, StorageNode{ + response.StorageNodes = append(response.StorageNodes, eth.StorageNode{ StateLeafKey: stateNode.LeafKey, StorageLeafKey: storageNode.LeafKey, IPLD: ipfs.BlockModel{ diff --git a/pkg/eth/filterer_test.go b/pkg/eth/filterer_test.go index 490f1386..41cbcf93 100644 --- a/pkg/eth/filterer_test.go +++ b/pkg/eth/filterer_test.go @@ -23,10 +23,11 @@ import ( . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth/mocks" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" + + "github.com/vulcanize/ipld-eth-server/pkg/eth" + "github.com/vulcanize/ipld-eth-server/pkg/shared" ) var ( @@ -40,10 +41,9 @@ var _ = Describe("Filterer", func() { }) It("Transcribes all the data from the IPLDPayload into the StreamPayload if given an open filter", func() { - payload, err := filterer.Filter(openFilter, mocks.MockConvertedPayload) + iplds, err := filterer.Filter(openFilter, mocks.MockConvertedPayload) Expect(err).ToNot(HaveOccurred()) - iplds, ok := payload.(eth.IPLDs) - Expect(ok).To(BeTrue()) + Expect(iplds).ToNot(BeNil()) Expect(iplds.BlockNumber.Int64()).To(Equal(mocks.MockIPLDs.BlockNumber.Int64())) Expect(iplds.Header).To(Equal(mocks.MockIPLDs.Header)) var expectedEmptyUncles []ipfs.BlockModel @@ -76,10 +76,9 @@ var _ = Describe("Filterer", func() { }) It("Applies filters from the provided config.Subscription", func() { - payload1, err := filterer.Filter(rctAddressFilter, mocks.MockConvertedPayload) + iplds1, err := filterer.Filter(rctAddressFilter, mocks.MockConvertedPayload) Expect(err).ToNot(HaveOccurred()) - iplds1, ok := payload1.(eth.IPLDs) - Expect(ok).To(BeTrue()) + Expect(iplds1).ToNot(BeNil()) Expect(iplds1.BlockNumber.Int64()).To(Equal(mocks.MockIPLDs.BlockNumber.Int64())) Expect(iplds1.Header).To(Equal(ipfs.BlockModel{})) Expect(len(iplds1.Uncles)).To(Equal(0)) @@ -92,10 +91,9 @@ var _ = Describe("Filterer", func() { CID: mocks.Rct1IPLD.Cid().String(), })) - payload2, err := filterer.Filter(rctTopicsFilter, mocks.MockConvertedPayload) + iplds2, err := filterer.Filter(rctTopicsFilter, mocks.MockConvertedPayload) Expect(err).ToNot(HaveOccurred()) - iplds2, ok := payload2.(eth.IPLDs) - Expect(ok).To(BeTrue()) + Expect(iplds2).ToNot(BeNil()) 
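For reference on the call-site change these test updates exercise: Filter now returns a typed *eth.IPLDs pointer instead of a shared.IPLDs interface value, so callers drop the type assertion and instead check for a nil result (nil is returned when the payload's block falls outside the subscription's Start/End range). A minimal usage sketch under that reading; the settings value, the example function name, and the printed fields are illustrative placeholders, not part of this patch:

package example

import (
	"fmt"

	"github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth/mocks"

	"github.com/vulcanize/ipld-eth-server/pkg/eth"
)

// filterSketch shows the new concrete-typed Filter call shape used by the tests.
func filterSketch(settings eth.SubscriptionSettings) error {
	filterer := eth.NewResponseFilterer()
	// Filter now returns (*eth.IPLDs, error); no type assertion is needed.
	iplds, err := filterer.Filter(settings, mocks.MockConvertedPayload)
	if err != nil {
		return err
	}
	if iplds == nil {
		// Payload was outside [settings.Start, settings.End]; nothing matched.
		return nil
	}
	fmt.Printf("block %s: %d receipts, %d state nodes\n",
		iplds.BlockNumber.String(), len(iplds.Receipts), len(iplds.StateNodes))
	return nil
}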
Expect(iplds2.BlockNumber.Int64()).To(Equal(mocks.MockIPLDs.BlockNumber.Int64())) Expect(iplds2.Header).To(Equal(ipfs.BlockModel{})) Expect(len(iplds2.Uncles)).To(Equal(0)) @@ -108,10 +106,9 @@ var _ = Describe("Filterer", func() { CID: mocks.Rct1IPLD.Cid().String(), })) - payload3, err := filterer.Filter(rctTopicsAndAddressFilter, mocks.MockConvertedPayload) + iplds3, err := filterer.Filter(rctTopicsAndAddressFilter, mocks.MockConvertedPayload) Expect(err).ToNot(HaveOccurred()) - iplds3, ok := payload3.(eth.IPLDs) - Expect(ok).To(BeTrue()) + Expect(iplds3).ToNot(BeNil()) Expect(iplds3.BlockNumber.Int64()).To(Equal(mocks.MockIPLDs.BlockNumber.Int64())) Expect(iplds3.Header).To(Equal(ipfs.BlockModel{})) Expect(len(iplds3.Uncles)).To(Equal(0)) @@ -124,10 +121,9 @@ var _ = Describe("Filterer", func() { CID: mocks.Rct1IPLD.Cid().String(), })) - payload4, err := filterer.Filter(rctAddressesAndTopicFilter, mocks.MockConvertedPayload) + iplds4, err := filterer.Filter(rctAddressesAndTopicFilter, mocks.MockConvertedPayload) Expect(err).ToNot(HaveOccurred()) - iplds4, ok := payload4.(eth.IPLDs) - Expect(ok).To(BeTrue()) + Expect(iplds4).ToNot(BeNil()) Expect(iplds4.BlockNumber.Int64()).To(Equal(mocks.MockIPLDs.BlockNumber.Int64())) Expect(iplds4.Header).To(Equal(ipfs.BlockModel{})) Expect(len(iplds4.Uncles)).To(Equal(0)) @@ -140,10 +136,9 @@ var _ = Describe("Filterer", func() { CID: mocks.Rct2IPLD.Cid().String(), })) - payload5, err := filterer.Filter(rctsForAllCollectedTrxs, mocks.MockConvertedPayload) + iplds5, err := filterer.Filter(rctsForAllCollectedTrxs, mocks.MockConvertedPayload) Expect(err).ToNot(HaveOccurred()) - iplds5, ok := payload5.(eth.IPLDs) - Expect(ok).To(BeTrue()) + Expect(iplds5).ToNot(BeNil()) Expect(iplds5.BlockNumber.Int64()).To(Equal(mocks.MockIPLDs.BlockNumber.Int64())) Expect(iplds5.Header).To(Equal(ipfs.BlockModel{})) Expect(len(iplds5.Uncles)).To(Equal(0)) @@ -158,10 +153,9 @@ var _ = Describe("Filterer", func() { Expect(shared.IPLDsContainBytes(iplds5.Receipts, mocks.MockReceipts.GetRlp(1))).To(BeTrue()) Expect(shared.IPLDsContainBytes(iplds5.Receipts, mocks.MockReceipts.GetRlp(2))).To(BeTrue()) - payload6, err := filterer.Filter(rctsForSelectCollectedTrxs, mocks.MockConvertedPayload) + iplds6, err := filterer.Filter(rctsForSelectCollectedTrxs, mocks.MockConvertedPayload) Expect(err).ToNot(HaveOccurred()) - iplds6, ok := payload6.(eth.IPLDs) - Expect(ok).To(BeTrue()) + Expect(iplds6).ToNot(BeNil()) Expect(iplds6.BlockNumber.Int64()).To(Equal(mocks.MockIPLDs.BlockNumber.Int64())) Expect(iplds6.Header).To(Equal(ipfs.BlockModel{})) Expect(len(iplds6.Uncles)).To(Equal(0)) @@ -175,10 +169,9 @@ var _ = Describe("Filterer", func() { CID: mocks.Rct2IPLD.Cid().String(), })) - payload7, err := filterer.Filter(stateFilter, mocks.MockConvertedPayload) + iplds7, err := filterer.Filter(stateFilter, mocks.MockConvertedPayload) Expect(err).ToNot(HaveOccurred()) - iplds7, ok := payload7.(eth.IPLDs) - Expect(ok).To(BeTrue()) + Expect(iplds7).ToNot(BeNil()) Expect(iplds7.BlockNumber.Int64()).To(Equal(mocks.MockIPLDs.BlockNumber.Int64())) Expect(iplds7.Header).To(Equal(ipfs.BlockModel{})) Expect(len(iplds7.Uncles)).To(Equal(0)) @@ -192,10 +185,9 @@ var _ = Describe("Filterer", func() { CID: mocks.State2IPLD.Cid().String(), })) - payload8, err := filterer.Filter(rctTopicsAndAddressFilterFail, mocks.MockConvertedPayload) + iplds8, err := filterer.Filter(rctTopicsAndAddressFilterFail, mocks.MockConvertedPayload) Expect(err).ToNot(HaveOccurred()) - iplds8, ok := payload8.(eth.IPLDs) - 
Expect(ok).To(BeTrue()) + Expect(iplds8).ToNot(BeNil()) Expect(iplds8.BlockNumber.Int64()).To(Equal(mocks.MockIPLDs.BlockNumber.Int64())) Expect(iplds8.Header).To(Equal(ipfs.BlockModel{})) Expect(len(iplds8.Uncles)).To(Equal(0)) diff --git a/pkg/eth/ipld_fetcher.go b/pkg/eth/ipld_fetcher.go index acea6197..0ed3df6c 100644 --- a/pkg/eth/ipld_fetcher.go +++ b/pkg/eth/ipld_fetcher.go @@ -25,11 +25,18 @@ import ( "github.com/jmoiron/sqlx" log "github.com/sirupsen/logrus" + "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" + + "github.com/vulcanize/ipld-eth-server/pkg/shared" ) +// Fetcher interface for substituting mocks in tests +type Fetcher interface { + Fetch(cids eth.CIDWrapper) (*eth.IPLDs, error) +} + // IPLDFetcher satisfies the IPLDFetcher interface for ethereum // It interfaces directly with PG-IPFS type IPLDFetcher struct { @@ -44,18 +51,15 @@ func NewIPLDFetcher(db *postgres.DB) *IPLDFetcher { } // Fetch is the exported method for fetching and returning all the IPLDS specified in the CIDWrapper -func (f *IPLDFetcher) Fetch(cids shared.CIDsForFetching) (shared.IPLDs, error) { - cidWrapper, ok := cids.(*CIDWrapper) - if !ok { - return nil, fmt.Errorf("eth fetcher: expected cids type %T got %T", &CIDWrapper{}, cids) - } +func (f *IPLDFetcher) Fetch(cids eth.CIDWrapper) (*eth.IPLDs, error) { log.Debug("fetching iplds") - iplds := IPLDs{} - iplds.TotalDifficulty, ok = new(big.Int).SetString(cidWrapper.Header.TotalDifficulty, 10) + iplds := new(eth.IPLDs) + var ok bool + iplds.TotalDifficulty, ok = new(big.Int).SetString(cids.Header.TotalDifficulty, 10) if !ok { return nil, errors.New("eth fetcher: unable to set total difficulty") } - iplds.BlockNumber = cidWrapper.BlockNumber + iplds.BlockNumber = cids.BlockNumber tx, err := f.db.Beginx() if err != nil { @@ -72,27 +76,27 @@ func (f *IPLDFetcher) Fetch(cids shared.CIDsForFetching) (shared.IPLDs, error) { } }() - iplds.Header, err = f.FetchHeader(tx, cidWrapper.Header) + iplds.Header, err = f.FetchHeader(tx, cids.Header) if err != nil { return nil, fmt.Errorf("eth pg fetcher: header fetching error: %s", err.Error()) } - iplds.Uncles, err = f.FetchUncles(tx, cidWrapper.Uncles) + iplds.Uncles, err = f.FetchUncles(tx, cids.Uncles) if err != nil { return nil, fmt.Errorf("eth pg fetcher: uncle fetching error: %s", err.Error()) } - iplds.Transactions, err = f.FetchTrxs(tx, cidWrapper.Transactions) + iplds.Transactions, err = f.FetchTrxs(tx, cids.Transactions) if err != nil { return nil, fmt.Errorf("eth pg fetcher: transaction fetching error: %s", err.Error()) } - iplds.Receipts, err = f.FetchRcts(tx, cidWrapper.Receipts) + iplds.Receipts, err = f.FetchRcts(tx, cids.Receipts) if err != nil { return nil, fmt.Errorf("eth pg fetcher: receipt fetching error: %s", err.Error()) } - iplds.StateNodes, err = f.FetchState(tx, cidWrapper.StateNodes) + iplds.StateNodes, err = f.FetchState(tx, cids.StateNodes) if err != nil { return nil, fmt.Errorf("eth pg fetcher: state fetching error: %s", err.Error()) } - iplds.StorageNodes, err = f.FetchStorage(tx, cidWrapper.StorageNodes) + iplds.StorageNodes, err = f.FetchStorage(tx, cids.StorageNodes) if err != nil { return nil, fmt.Errorf("eth pg fetcher: storage fetching error: %s", err.Error()) } @@ -100,7 +104,7 @@ func (f *IPLDFetcher) Fetch(cids shared.CIDsForFetching) (shared.IPLDs, error) { } // FetchHeaders fetches headers -func (f 
*IPLDFetcher) FetchHeader(tx *sqlx.Tx, c HeaderModel) (ipfs.BlockModel, error) { +func (f *IPLDFetcher) FetchHeader(tx *sqlx.Tx, c eth.HeaderModel) (ipfs.BlockModel, error) { log.Debug("fetching header ipld") headerBytes, err := shared.FetchIPLDByMhKey(tx, c.MhKey) if err != nil { @@ -113,7 +117,7 @@ func (f *IPLDFetcher) FetchHeader(tx *sqlx.Tx, c HeaderModel) (ipfs.BlockModel, } // FetchUncles fetches uncles -func (f *IPLDFetcher) FetchUncles(tx *sqlx.Tx, cids []UncleModel) ([]ipfs.BlockModel, error) { +func (f *IPLDFetcher) FetchUncles(tx *sqlx.Tx, cids []eth.UncleModel) ([]ipfs.BlockModel, error) { log.Debug("fetching uncle iplds") uncleIPLDs := make([]ipfs.BlockModel, len(cids)) for i, c := range cids { @@ -130,7 +134,7 @@ func (f *IPLDFetcher) FetchUncles(tx *sqlx.Tx, cids []UncleModel) ([]ipfs.BlockM } // FetchTrxs fetches transactions -func (f *IPLDFetcher) FetchTrxs(tx *sqlx.Tx, cids []TxModel) ([]ipfs.BlockModel, error) { +func (f *IPLDFetcher) FetchTrxs(tx *sqlx.Tx, cids []eth.TxModel) ([]ipfs.BlockModel, error) { log.Debug("fetching transaction iplds") trxIPLDs := make([]ipfs.BlockModel, len(cids)) for i, c := range cids { @@ -147,7 +151,7 @@ func (f *IPLDFetcher) FetchTrxs(tx *sqlx.Tx, cids []TxModel) ([]ipfs.BlockModel, } // FetchRcts fetches receipts -func (f *IPLDFetcher) FetchRcts(tx *sqlx.Tx, cids []ReceiptModel) ([]ipfs.BlockModel, error) { +func (f *IPLDFetcher) FetchRcts(tx *sqlx.Tx, cids []eth.ReceiptModel) ([]ipfs.BlockModel, error) { log.Debug("fetching receipt iplds") rctIPLDs := make([]ipfs.BlockModel, len(cids)) for i, c := range cids { @@ -164,9 +168,9 @@ func (f *IPLDFetcher) FetchRcts(tx *sqlx.Tx, cids []ReceiptModel) ([]ipfs.BlockM } // FetchState fetches state nodes -func (f *IPLDFetcher) FetchState(tx *sqlx.Tx, cids []StateNodeModel) ([]StateNode, error) { +func (f *IPLDFetcher) FetchState(tx *sqlx.Tx, cids []eth.StateNodeModel) ([]eth.StateNode, error) { log.Debug("fetching state iplds") - stateNodes := make([]StateNode, 0, len(cids)) + stateNodes := make([]eth.StateNode, 0, len(cids)) for _, stateNode := range cids { if stateNode.CID == "" { continue @@ -175,7 +179,7 @@ func (f *IPLDFetcher) FetchState(tx *sqlx.Tx, cids []StateNodeModel) ([]StateNod if err != nil { return nil, err } - stateNodes = append(stateNodes, StateNode{ + stateNodes = append(stateNodes, eth.StateNode{ IPLD: ipfs.BlockModel{ Data: stateBytes, CID: stateNode.CID, @@ -189,9 +193,9 @@ func (f *IPLDFetcher) FetchState(tx *sqlx.Tx, cids []StateNodeModel) ([]StateNod } // FetchStorage fetches storage nodes -func (f *IPLDFetcher) FetchStorage(tx *sqlx.Tx, cids []StorageNodeWithStateKeyModel) ([]StorageNode, error) { +func (f *IPLDFetcher) FetchStorage(tx *sqlx.Tx, cids []eth.StorageNodeWithStateKeyModel) ([]eth.StorageNode, error) { log.Debug("fetching storage iplds") - storageNodes := make([]StorageNode, 0, len(cids)) + storageNodes := make([]eth.StorageNode, 0, len(cids)) for _, storageNode := range cids { if storageNode.CID == "" || storageNode.StateKey == "" { continue @@ -200,7 +204,7 @@ func (f *IPLDFetcher) FetchStorage(tx *sqlx.Tx, cids []StorageNodeWithStateKeyMo if err != nil { return nil, err } - storageNodes = append(storageNodes, StorageNode{ + storageNodes = append(storageNodes, eth.StorageNode{ IPLD: ipfs.BlockModel{ Data: storageBytes, CID: storageNode.CID, diff --git a/pkg/eth/ipld_fetcher_test.go b/pkg/eth/ipld_fetcher_test.go index c1165d8c..a370c774 100644 --- a/pkg/eth/ipld_fetcher_test.go +++ b/pkg/eth/ipld_fetcher_test.go @@ -20,15 +20,17 @@ import ( . 
"github.com/onsi/ginkgo" . "github.com/onsi/gomega" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" + eth2 "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth/mocks" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" + + "github.com/vulcanize/ipld-eth-server/pkg/eth" + "github.com/vulcanize/ipld-eth-server/pkg/shared" ) var ( db *postgres.DB - pubAndIndexer *eth.IPLDPublisher + pubAndIndexer *eth2.IPLDPublisher fetcher *eth.IPLDFetcher ) @@ -38,7 +40,7 @@ var _ = Describe("IPLDFetcher", func() { var err error db, err = shared.SetupDB() Expect(err).ToNot(HaveOccurred()) - pubAndIndexer = eth.NewIPLDPublisher(db) + pubAndIndexer = eth2.NewIPLDPublisher(db) err = pubAndIndexer.Publish(mocks.MockConvertedPayload) Expect(err).ToNot(HaveOccurred()) fetcher = eth.NewIPLDFetcher(db) @@ -48,10 +50,9 @@ var _ = Describe("IPLDFetcher", func() { }) It("Fetches and returns IPLDs for the CIDs provided in the CIDWrapper", func() { - i, err := fetcher.Fetch(mocks.MockCIDWrapper) + iplds, err := fetcher.Fetch(*mocks.MockCIDWrapper) Expect(err).ToNot(HaveOccurred()) - iplds, ok := i.(eth.IPLDs) - Expect(ok).To(BeTrue()) + Expect(iplds).ToNot(BeNil()) Expect(iplds.TotalDifficulty).To(Equal(mocks.MockConvertedPayload.TotalDifficulty)) Expect(iplds.BlockNumber).To(Equal(mocks.MockConvertedPayload.Block.Number())) Expect(iplds.Header).To(Equal(mocks.MockIPLDs.Header)) diff --git a/pkg/eth/mocks/converter.go b/pkg/eth/mocks/converter.go index 728d1871..50158a4e 100644 --- a/pkg/eth/mocks/converter.go +++ b/pkg/eth/mocks/converter.go @@ -21,8 +21,8 @@ import ( "github.com/ethereum/go-ethereum/statediff" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" + "github.com/vulcanize/ipld-eth-server/pkg/eth" + "github.com/vulcanize/ipld-eth-server/pkg/shared" ) // PayloadConverter is the underlying struct for the Converter interface diff --git a/pkg/eth/mocks/indexer.go b/pkg/eth/mocks/indexer.go index c01d4dd2..cee84767 100644 --- a/pkg/eth/mocks/indexer.go +++ b/pkg/eth/mocks/indexer.go @@ -19,9 +19,9 @@ package mocks import ( "fmt" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" + "github.com/vulcanize/ipld-eth-server/pkg/shared" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" + "github.com/vulcanize/ipld-eth-server/pkg/eth" ) // CIDIndexer is the underlying struct for the Indexer interface diff --git a/pkg/eth/mocks/publisher.go b/pkg/eth/mocks/publisher.go index c3e9a26a..5758b277 100644 --- a/pkg/eth/mocks/publisher.go +++ b/pkg/eth/mocks/publisher.go @@ -19,9 +19,9 @@ package mocks import ( "fmt" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" + "github.com/vulcanize/ipld-eth-server/pkg/shared" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" + "github.com/vulcanize/ipld-eth-server/pkg/eth" ) // IPLDPublisher is the underlying struct for the Publisher interface diff --git a/pkg/eth/subscription_config.go b/pkg/eth/subscription_config.go index b56585ee..d74ad3fd 100644 --- a/pkg/eth/subscription_config.go +++ b/pkg/eth/subscription_config.go @@ -20,8 +20,6 @@ import ( "math/big" "github.com/spf13/viper" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" ) // SubscriptionSettings config is used by a subscriber to specify what eth data to stream from the watcher @@ -125,28 +123,3 @@ func NewEthSubscriptionConfig() (*SubscriptionSettings, error) { } return sc, nil } - 
-// StartingBlock satisfies the SubscriptionSettings() interface -func (sc *SubscriptionSettings) StartingBlock() *big.Int { - return sc.Start -} - -// EndingBlock satisfies the SubscriptionSettings() interface -func (sc *SubscriptionSettings) EndingBlock() *big.Int { - return sc.End -} - -// HistoricalData satisfies the SubscriptionSettings() interface -func (sc *SubscriptionSettings) HistoricalData() bool { - return sc.BackFill -} - -// HistoricalDataOnly satisfies the SubscriptionSettings() interface -func (sc *SubscriptionSettings) HistoricalDataOnly() bool { - return sc.BackFillOnly -} - -// ChainType satisfies the SubscriptionSettings() interface -func (sc *SubscriptionSettings) ChainType() shared.ChainType { - return shared.Ethereum -} diff --git a/pkg/eth/test_helpers.go b/pkg/eth/test_helpers.go index 5d241203..48fd71a2 100644 --- a/pkg/eth/test_helpers.go +++ b/pkg/eth/test_helpers.go @@ -19,6 +19,7 @@ package eth import ( . "github.com/onsi/gomega" + "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" ) @@ -45,7 +46,7 @@ func TearDownDB(db *postgres.DB) { } // TxModelsContainsCID used to check if a list of TxModels contains a specific cid string -func TxModelsContainsCID(txs []TxModel, cid string) bool { +func TxModelsContainsCID(txs []eth.TxModel, cid string) bool { for _, tx := range txs { if tx.CID == cid { return true @@ -55,7 +56,7 @@ func TxModelsContainsCID(txs []TxModel, cid string) bool { } // ListContainsBytes used to check if a list of byte arrays contains a particular byte array -func ReceiptModelsContainsCID(rcts []ReceiptModel, cid string) bool { +func ReceiptModelsContainsCID(rcts []eth.ReceiptModel, cid string) bool { for _, rct := range rcts { if rct.CID == cid { return true diff --git a/pkg/shared/env.go b/pkg/shared/env.go index b11c726e..c14774a4 100644 --- a/pkg/shared/env.go +++ b/pkg/shared/env.go @@ -17,79 +17,32 @@ package shared import ( - "github.com/ethereum/go-ethereum/rpc" - - "github.com/btcsuite/btcd/rpcclient" "github.com/spf13/viper" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/node" ) // Env variables const ( - HTTP_TIMEOUT = "HTTP_TIMEOUT" - - ETH_WS_PATH = "ETH_WS_PATH" - ETH_HTTP_PATH = "ETH_HTTP_PATH" ETH_NODE_ID = "ETH_NODE_ID" ETH_CLIENT_NAME = "ETH_CLIENT_NAME" ETH_GENESIS_BLOCK = "ETH_GENESIS_BLOCK" ETH_NETWORK_ID = "ETH_NETWORK_ID" ETH_CHAIN_ID = "ETH_CHAIN_ID" - - BTC_WS_PATH = "BTC_WS_PATH" - BTC_HTTP_PATH = "BTC_HTTP_PATH" - BTC_NODE_PASSWORD = "BTC_NODE_PASSWORD" - BTC_NODE_USER = "BTC_NODE_USER" - BTC_NODE_ID = "BTC_NODE_ID" - BTC_CLIENT_NAME = "BTC_CLIENT_NAME" - BTC_GENESIS_BLOCK = "BTC_GENESIS_BLOCK" - BTC_NETWORK_ID = "BTC_NETWORK_ID" - BTC_CHAIN_ID = "BTC_CHAIN_ID" ) -// GetEthNodeAndClient returns eth node info and client from path url -func GetEthNodeAndClient(path string) (node.Node, *rpc.Client, error) { +// GetNodeInfo returns the ethereum node info from env variables +func GetNodeInfo() node.Info { viper.BindEnv("ethereum.nodeID", ETH_NODE_ID) viper.BindEnv("ethereum.clientName", ETH_CLIENT_NAME) viper.BindEnv("ethereum.genesisBlock", ETH_GENESIS_BLOCK) viper.BindEnv("ethereum.networkID", ETH_NETWORK_ID) viper.BindEnv("ethereum.chainID", ETH_CHAIN_ID) - rpcClient, err := rpc.Dial(path) - if err != nil { - return node.Node{}, nil, err - } - return node.Node{ + return node.Info{ ID: viper.GetString("ethereum.nodeID"), ClientName: viper.GetString("ethereum.clientName"), GenesisBlock: viper.GetString("ethereum.genesisBlock"), NetworkID: 
viper.GetString("ethereum.networkID"), ChainID: viper.GetUint64("ethereum.chainID"), - }, rpcClient, nil -} - -// GetBtcNodeAndClient returns btc node info from path url -func GetBtcNodeAndClient(path string) (node.Node, *rpcclient.ConnConfig) { - viper.BindEnv("bitcoin.nodeID", BTC_NODE_ID) - viper.BindEnv("bitcoin.clientName", BTC_CLIENT_NAME) - viper.BindEnv("bitcoin.genesisBlock", BTC_GENESIS_BLOCK) - viper.BindEnv("bitcoin.networkID", BTC_NETWORK_ID) - viper.BindEnv("bitcoin.pass", BTC_NODE_PASSWORD) - viper.BindEnv("bitcoin.user", BTC_NODE_USER) - viper.BindEnv("bitcoin.chainID", BTC_CHAIN_ID) - - // For bitcoin we load in node info from the config because there is no RPC endpoint to retrieve this from the node - return node.Node{ - ID: viper.GetString("bitcoin.nodeID"), - ClientName: viper.GetString("bitcoin.clientName"), - GenesisBlock: viper.GetString("bitcoin.genesisBlock"), - NetworkID: viper.GetString("bitcoin.networkID"), - ChainID: viper.GetUint64("bitcoin.chainID"), - }, &rpcclient.ConnConfig{ - Host: path, - HTTPPostMode: true, // Bitcoin core only supports HTTP POST mode - DisableTLS: true, // Bitcoin core does not provide TLS by default - Pass: viper.GetString("bitcoin.pass"), - User: viper.GetString("bitcoin.user"), - } + } } diff --git a/pkg/shared/mocks/payload_fetcher.go b/pkg/shared/mocks/payload_fetcher.go index 1b3e2788..218cd923 100644 --- a/pkg/shared/mocks/payload_fetcher.go +++ b/pkg/shared/mocks/payload_fetcher.go @@ -20,7 +20,7 @@ import ( "errors" "sync/atomic" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" + "github.com/vulcanize/ipld-eth-server/pkg/shared" ) // PayloadFetcher mock for tests diff --git a/pkg/shared/mocks/retriever.go b/pkg/shared/mocks/retriever.go index b878cea5..ba3843f9 100644 --- a/pkg/shared/mocks/retriever.go +++ b/pkg/shared/mocks/retriever.go @@ -18,7 +18,7 @@ package mocks import ( "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" + "github.com/vulcanize/ipld-eth-server/pkg/shared" ) // CIDRetriever is a mock CID retriever for use in tests diff --git a/pkg/shared/mocks/streamer.go b/pkg/shared/mocks/streamer.go index 1fdb49be..daf683eb 100644 --- a/pkg/shared/mocks/streamer.go +++ b/pkg/shared/mocks/streamer.go @@ -18,7 +18,7 @@ package mocks import ( "github.com/ethereum/go-ethereum/rpc" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" + "github.com/vulcanize/ipld-eth-server/pkg/shared" ) // PayloadStreamer mock struct diff --git a/pkg/shared/test_helpers.go b/pkg/shared/test_helpers.go index 09291d2f..38eead8e 100644 --- a/pkg/shared/test_helpers.go +++ b/pkg/shared/test_helpers.go @@ -19,10 +19,11 @@ package shared import ( "bytes" + "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" + "github.com/ipfs/go-cid" "github.com/multiformats/go-multihash" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/config" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/node" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" @@ -30,11 +31,11 @@ import ( // SetupDB is use to setup a db for watcher tests func SetupDB() (*postgres.DB, error) { - return postgres.NewDB(config.Database{ + return postgres.NewDB(postgres.Config{ Hostname: "localhost", Name: "vulcanize_testing", Port: 5432, - }, node.Node{}) + }, node.Info{}) } // ListContainsString used to check if a list of strings contains a particular string @@ -58,7 +59,7 @@ func IPLDsContainBytes(iplds []ipfs.BlockModel, b []byte) 
bool { } // ListContainsGap used to check if a list of Gaps contains a particular Gap -func ListContainsGap(gapList []Gap, gap Gap) bool { +func ListContainsGap(gapList []eth.DBGap, gap eth.DBGap) bool { for _, listGap := range gapList { if listGap == gap { return true diff --git a/pkg/watch/api.go b/pkg/watch/api.go index 6154cfe3..fd95b79d 100644 --- a/pkg/watch/api.go +++ b/pkg/watch/api.go @@ -20,15 +20,14 @@ import ( "context" "github.com/ethereum/go-ethereum/p2p" - "github.com/ethereum/go-ethereum/rlp" "github.com/ethereum/go-ethereum/rpc" log "github.com/sirupsen/logrus" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/btc" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/node" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" - v "github.com/vulcanize/ipfs-blockchain-watcher/version" + + "github.com/vulcanize/ipld-eth-server/pkg/eth" + "github.com/vulcanize/ipld-eth-server/pkg/shared" + v "github.com/vulcanize/ipld-eth-server/version" ) // APIName is the namespace used for the state diffing service API @@ -50,24 +49,7 @@ func NewPublicWatcherAPI(w Watcher) *PublicWatcherAPI { } // Stream is the public method to setup a subscription that fires off IPLD payloads as they are processed -func (api *PublicWatcherAPI) Stream(ctx context.Context, rlpParams []byte) (*rpc.Subscription, error) { - var params shared.SubscriptionSettings - switch api.w.Chain() { - case shared.Ethereum: - var ethParams eth.SubscriptionSettings - if err := rlp.DecodeBytes(rlpParams, ðParams); err != nil { - return nil, err - } - params = ðParams - case shared.Bitcoin: - var btcParams btc.SubscriptionSettings - if err := rlp.DecodeBytes(rlpParams, &btcParams); err != nil { - return nil, err - } - params = &btcParams - default: - panic("ipfs-blockchain-watcher is not configured for a specific chain type") - } +func (api *PublicWatcherAPI) Stream(ctx context.Context, params eth.SubscriptionSettings) (*rpc.Subscription, error) { // ensure that the RPC connection supports subscriptions notifier, supported := rpc.NotifierFromContext(ctx) if !supported { @@ -107,7 +89,7 @@ func (api *PublicWatcherAPI) Stream(ctx context.Context, rlpParams []byte) (*rpc // Node is a public rpc method to allow transformers to fetch the node info for the watcher // NOTE: this is the node info for the node that the watcher is syncing from, not the node info for the watcher itself -func (api *PublicWatcherAPI) Node() *node.Node { +func (api *PublicWatcherAPI) Node() *node.Info { return api.w.Node() } @@ -136,7 +118,7 @@ func (iapi *InfoAPI) NodeInfo() *p2p.NodeInfo { return &p2p.NodeInfo{ // TODO: formalize this ID: "vulcanizeDB", - Name: "ipfs-blockchain-watcher", + Name: "ipld-eth-server", } } diff --git a/pkg/watch/config.go b/pkg/watch/config.go index 2f8bac20..45880186 100644 --- a/pkg/watch/config.go +++ b/pkg/watch/config.go @@ -17,33 +17,22 @@ package watch import ( - "fmt" "os" "path/filepath" "github.com/spf13/viper" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/config" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/node" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" - "github.com/vulcanize/ipfs-blockchain-watcher/utils" + + "github.com/vulcanize/ipld-eth-server/utils" ) // Env variables const ( - SUPERNODE_CHAIN = "SUPERNODE_CHAIN" - SUPERNODE_SYNC = "SUPERNODE_SYNC" - SUPERNODE_WORKERS = "SUPERNODE_WORKERS" - SUPERNODE_SERVER = "SUPERNODE_SERVER" - SUPERNODE_WS_PATH = 
"SUPERNODE_WS_PATH" - SUPERNODE_IPC_PATH = "SUPERNODE_IPC_PATH" - SUPERNODE_HTTP_PATH = "SUPERNODE_HTTP_PATH" - SUPERNODE_BACKFILL = "SUPERNODE_BACKFILL" - - SYNC_MAX_IDLE_CONNECTIONS = "SYNC_MAX_IDLE_CONNECTIONS" - SYNC_MAX_OPEN_CONNECTIONS = "SYNC_MAX_OPEN_CONNECTIONS" - SYNC_MAX_CONN_LIFETIME = "SYNC_MAX_CONN_LIFETIME" + SERVER_WS_PATH = "SERVER_WS_PATH" + SERVER_IPC_PATH = "SERVER_IPC_PATH" + SERVER_HTTP_PATH = "SERVER_HTTP_PATH" SERVER_MAX_IDLE_CONNECTIONS = "SERVER_MAX_IDLE_CONNECTIONS" SERVER_MAX_OPEN_CONNECTIONS = "SERVER_MAX_OPEN_CONNECTIONS" @@ -52,97 +41,47 @@ const ( // Config struct type Config struct { - Chain shared.ChainType - DBConfig config.Database - // Server fields - Serve bool - ServeDBConn *postgres.DB + DB *postgres.DB + DBConfig postgres.Config WSEndpoint string HTTPEndpoint string IPCEndpoint string - // Sync params - Sync bool - SyncDBConn *postgres.DB - Workers int - WSClient interface{} - NodeInfo node.Node - // Historical switch - Historical bool + NodeInfo node.Info } // NewConfig is used to initialize a watcher config from a .toml file // Separate chain watcher instances need to be ran with separate ipfs path in order to avoid lock contention on the ipfs repository lockfile func NewConfig() (*Config, error) { c := new(Config) - var err error - viper.BindEnv("watcher.chain", SUPERNODE_CHAIN) - viper.BindEnv("watcher.sync", SUPERNODE_SYNC) - viper.BindEnv("watcher.workers", SUPERNODE_WORKERS) - viper.BindEnv("ethereum.wsPath", shared.ETH_WS_PATH) - viper.BindEnv("bitcoin.wsPath", shared.BTC_WS_PATH) - viper.BindEnv("watcher.server", SUPERNODE_SERVER) - viper.BindEnv("watcher.wsPath", SUPERNODE_WS_PATH) - viper.BindEnv("watcher.ipcPath", SUPERNODE_IPC_PATH) - viper.BindEnv("watcher.httpPath", SUPERNODE_HTTP_PATH) - viper.BindEnv("watcher.backFill", SUPERNODE_BACKFILL) - - c.Historical = viper.GetBool("watcher.backFill") - chain := viper.GetString("watcher.chain") - c.Chain, err = shared.NewChainType(chain) - if err != nil { - return nil, err - } + viper.BindEnv("server.wsPath", SERVER_WS_PATH) + viper.BindEnv("server.ipcPath", SERVER_IPC_PATH) + viper.BindEnv("server.httpPath", SERVER_HTTP_PATH) c.DBConfig.Init() - c.Sync = viper.GetBool("watcher.sync") - if c.Sync { - workers := viper.GetInt("watcher.workers") - if workers < 1 { - workers = 1 - } - c.Workers = workers - switch c.Chain { - case shared.Ethereum: - ethWS := viper.GetString("ethereum.wsPath") - c.NodeInfo, c.WSClient, err = shared.GetEthNodeAndClient(fmt.Sprintf("ws://%s", ethWS)) - if err != nil { - return nil, err - } - case shared.Bitcoin: - btcWS := viper.GetString("bitcoin.wsPath") - c.NodeInfo, c.WSClient = shared.GetBtcNodeAndClient(btcWS) - } - syncDBConn := overrideDBConnConfig(c.DBConfig, Sync) - syncDB := utils.LoadPostgres(syncDBConn, c.NodeInfo) - c.SyncDBConn = &syncDB - } - c.Serve = viper.GetBool("watcher.server") - if c.Serve { - wsPath := viper.GetString("watcher.wsPath") - if wsPath == "" { - wsPath = "127.0.0.1:8080" - } - c.WSEndpoint = wsPath - ipcPath := viper.GetString("watcher.ipcPath") - if ipcPath == "" { - home, err := os.UserHomeDir() - if err != nil { - return nil, err - } - ipcPath = filepath.Join(home, ".vulcanize/vulcanize.ipc") - } - c.IPCEndpoint = ipcPath - httpPath := viper.GetString("watcher.httpPath") - if httpPath == "" { - httpPath = "127.0.0.1:8081" - } - c.HTTPEndpoint = httpPath - serveDBConn := overrideDBConnConfig(c.DBConfig, Serve) - serveDB := utils.LoadPostgres(serveDBConn, c.NodeInfo) - c.ServeDBConn = &serveDB + wsPath := 
viper.GetString("watcher.wsPath") + if wsPath == "" { + wsPath = "127.0.0.1:8080" } + c.WSEndpoint = wsPath + ipcPath := viper.GetString("watcher.ipcPath") + if ipcPath == "" { + home, err := os.UserHomeDir() + if err != nil { + return nil, err + } + ipcPath = filepath.Join(home, ".vulcanize/vulcanize.ipc") + } + c.IPCEndpoint = ipcPath + httpPath := viper.GetString("watcher.httpPath") + if httpPath == "" { + httpPath = "127.0.0.1:8081" + } + c.HTTPEndpoint = httpPath + overrideDBConnConfig(&c.DBConfig) + serveDB := utils.LoadPostgres(c.DBConfig, c.NodeInfo) + c.DB = &serveDB return c, nil } @@ -154,23 +93,11 @@ var ( Serve mode = "serve" ) -func overrideDBConnConfig(con config.Database, m mode) config.Database { - switch m { - case Sync: - viper.BindEnv("database.sync.maxIdle", SYNC_MAX_IDLE_CONNECTIONS) - viper.BindEnv("database.sync.maxOpen", SYNC_MAX_OPEN_CONNECTIONS) - viper.BindEnv("database.sync.maxLifetime", SYNC_MAX_CONN_LIFETIME) - con.MaxIdle = viper.GetInt("database.sync.maxIdle") - con.MaxOpen = viper.GetInt("database.sync.maxOpen") - con.MaxLifetime = viper.GetInt("database.sync.maxLifetime") - case Serve: - viper.BindEnv("database.server.maxIdle", SERVER_MAX_IDLE_CONNECTIONS) - viper.BindEnv("database.server.maxOpen", SERVER_MAX_OPEN_CONNECTIONS) - viper.BindEnv("database.server.maxLifetime", SERVER_MAX_CONN_LIFETIME) - con.MaxIdle = viper.GetInt("database.server.maxIdle") - con.MaxOpen = viper.GetInt("database.server.maxOpen") - con.MaxLifetime = viper.GetInt("database.server.maxLifetime") - default: - } - return con +func overrideDBConnConfig(con *postgres.Config) { + viper.BindEnv("database.server.maxIdle", SERVER_MAX_IDLE_CONNECTIONS) + viper.BindEnv("database.server.maxOpen", SERVER_MAX_OPEN_CONNECTIONS) + viper.BindEnv("database.server.maxLifetime", SERVER_MAX_CONN_LIFETIME) + con.MaxIdle = viper.GetInt("database.server.maxIdle") + con.MaxOpen = viper.GetInt("database.server.maxOpen") + con.MaxLifetime = viper.GetInt("database.server.maxLifetime") } diff --git a/pkg/watch/service.go b/pkg/watch/service.go index 74cb7422..b3ce55bf 100644 --- a/pkg/watch/service.go +++ b/pkg/watch/service.go @@ -28,10 +28,12 @@ import ( "github.com/ethereum/go-ethereum/rpc" log "github.com/sirupsen/logrus" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/builders" + eth2 "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/node" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" + + "github.com/vulcanize/ipld-eth-server/pkg/eth" + "github.com/vulcanize/ipld-eth-server/pkg/shared" ) const ( @@ -44,16 +46,14 @@ const ( type Watcher interface { // APIs(), Protocols(), Start() and Stop() ethnode.Service - // Data processing event loop - Sync(wg *sync.WaitGroup, forwardPayloadChan chan<- shared.ConvertedData) error // Pub-Sub handling event loop - Serve(wg *sync.WaitGroup, screenAndServePayload <-chan shared.ConvertedData) + Serve(wg *sync.WaitGroup, screenAndServePayload <-chan eth2.ConvertedPayload) // Method to subscribe to the service - Subscribe(id rpc.ID, sub chan<- SubscriptionPayload, quitChan chan<- bool, params shared.SubscriptionSettings) + Subscribe(id rpc.ID, sub chan<- SubscriptionPayload, quitChan chan<- bool, params eth.SubscriptionSettings) // Method to unsubscribe from the service Unsubscribe(id rpc.ID) // Method to access the node info for the service - Node() *node.Node + Node() *node.Info // Method to access chain type Chain() shared.ChainType } @@ 
-62,32 +62,20 @@ type Watcher interface { type Service struct { // Used to sync access to the Subscriptions sync.Mutex - // Interface for streaming payloads over an rpc subscription - Streamer shared.PayloadStreamer - // Interface for converting raw payloads into IPLD object payloads - Converter shared.PayloadConverter - // Interface for publishing and indexing the PG-IPLD payloads - Publisher shared.IPLDPublisher // Interface for filtering and serving data according to subscribed clients according to their specification - Filterer shared.ResponseFilterer + Filterer eth.Filterer // Interface for fetching IPLD objects from IPFS - IPLDFetcher shared.IPLDFetcher + IPLDFetcher eth.Fetcher // Interface for searching and retrieving CIDs from Postgres index - Retriever shared.CIDRetriever - // Chan the processor uses to subscribe to payloads from the Streamer - PayloadChan chan shared.RawChainData + Retriever eth.Retriever // Used to signal shutdown of the service QuitChan chan bool // A mapping of rpc.IDs to their subscription channels, mapped to their subscription type (hash of the StreamFilters) Subscriptions map[common.Hash]map[rpc.ID]Subscription // A mapping of subscription params hash to the corresponding subscription params - SubscriptionTypes map[common.Hash]shared.SubscriptionSettings + SubscriptionTypes map[common.Hash]eth.SubscriptionSettings // Info for the Geth node that this watcher is working with - NodeInfo *node.Node - // Number of publish workers - WorkerPoolSize int - // chain type for this service - chain shared.ChainType + NodeInfo *node.Info // Underlying db db *postgres.DB // wg for syncing serve processes @@ -97,44 +85,14 @@ type Service struct { // NewWatcher creates a new Watcher using an underlying Service struct func NewWatcher(settings *Config) (Watcher, error) { sn := new(Service) - var err error - // If we are syncing, initialize the needed interfaces - if settings.Sync { - sn.Streamer, sn.PayloadChan, err = builders.NewPayloadStreamer(settings.Chain, settings.WSClient) - if err != nil { - return nil, err - } - sn.Converter, err = builders.NewPayloadConverter(settings.Chain, settings.NodeInfo.ChainID) - if err != nil { - return nil, err - } - sn.Publisher, err = builders.NewIPLDPublisher(settings.Chain, settings.SyncDBConn) - if err != nil { - return nil, err - } - sn.Filterer, err = builders.NewResponseFilterer(settings.Chain) - if err != nil { - return nil, err - } - } - // If we are serving, initialize the needed interfaces - if settings.Serve { - sn.Retriever, err = builders.NewCIDRetriever(settings.Chain, settings.ServeDBConn) - if err != nil { - return nil, err - } - sn.IPLDFetcher, err = builders.NewIPLDFetcher(settings.Chain, settings.ServeDBConn) - if err != nil { - return nil, err - } - sn.db = settings.ServeDBConn - } + sn.Retriever = eth.NewCIDRetriever(settings.DB) + sn.IPLDFetcher = eth.NewIPLDFetcher(settings.DB) + sn.Filterer = eth.NewResponseFilterer() + sn.db = settings.DB sn.QuitChan = make(chan bool) sn.Subscriptions = make(map[common.Hash]map[rpc.ID]Subscription) - sn.SubscriptionTypes = make(map[common.Hash]shared.SubscriptionSettings) - sn.WorkerPoolSize = settings.Workers + sn.SubscriptionTypes = make(map[common.Hash]eth.SubscriptionSettings) sn.NodeInfo = &settings.NodeInfo - sn.chain = settings.Chain return sn, nil } @@ -172,91 +130,24 @@ func (sap *Service) APIs() []rpc.API { Public: true, }, } - chainAPI, err := builders.NewPublicAPI(sap.chain, sap.db) + backend, err := eth.NewEthBackend(sap.db) if err != nil { log.Error(err) - return 
apis - } - return append(apis, chainAPI) -} - -// Sync streams incoming raw chain data and converts it for further processing -// It forwards the converted data to the publish process(es) it spins up -// If forwards the converted data to a ScreenAndServe process if it there is one listening on the passed screenAndServePayload channel -// This continues on no matter if or how many subscribers there are -func (sap *Service) Sync(wg *sync.WaitGroup, screenAndServePayload chan<- shared.ConvertedData) error { - sub, err := sap.Streamer.Stream(sap.PayloadChan) - if err != nil { - return err - } - // spin up publish worker goroutines - publishPayload := make(chan shared.ConvertedData, PayloadChanBufferSize) - for i := 1; i <= sap.WorkerPoolSize; i++ { - go sap.publish(wg, i, publishPayload) - log.Debugf("%s publish worker %d successfully spun up", sap.chain.String(), i) - } - go func() { - wg.Add(1) - defer wg.Done() - for { - select { - case payload := <-sap.PayloadChan: - ipldPayload, err := sap.Converter.Convert(payload) - if err != nil { - log.Errorf("watcher conversion error for chain %s: %v", sap.chain.String(), err) - continue - } - log.Infof("%s data streamed at head height %d", sap.chain.String(), ipldPayload.Height()) - // If we have a ScreenAndServe process running, forward the iplds to it - select { - case screenAndServePayload <- ipldPayload: - default: - } - // Forward the payload to the publish workers - // this channel acts as a ring buffer - select { - case publishPayload <- ipldPayload: - default: - <-publishPayload - publishPayload <- ipldPayload - } - case err := <-sub.Err(): - log.Errorf("watcher subscription error for chain %s: %v", sap.chain.String(), err) - case <-sap.QuitChan: - log.Infof("quiting %s Sync process", sap.chain.String()) - return - } - } - }() - log.Infof("%s Sync goroutine successfully spun up", sap.chain.String()) - return nil -} - -// publish is spun up by SyncAndConvert and receives converted chain data from that process -// it publishes this data to IPFS and indexes their CIDs with useful metadata in Postgres -func (sap *Service) publish(wg *sync.WaitGroup, id int, publishPayload <-chan shared.ConvertedData) { - wg.Add(1) - defer wg.Done() - for { - select { - case payload := <-publishPayload: - log.Debugf("%s watcher sync worker %d publishing and indexing data streamed at head height %d", sap.chain.String(), id, payload.Height()) - if err := sap.Publisher.Publish(payload); err != nil { - log.Errorf("%s watcher publish worker %d publishing error: %v", sap.chain.String(), id, err) - continue - } - case <-sap.QuitChan: - log.Infof("%s watcher publish worker %d shutting down", sap.chain.String(), id) - return - } + return nil } + return append(apis, rpc.API{ + Namespace: eth.APIName, + Version: eth.APIVersion, + Service: eth.NewPublicEthAPI(backend), + Public: true, + }) } // Serve listens for incoming converter data off the screenAndServePayload from the Sync process // It filters and sends this data to any subscribers to the service // This process can also be stood up alone, without an screenAndServePayload attached to a Sync process // and it will hang on the WaitGroup indefinitely, allowing the Service to serve historical data requests only -func (sap *Service) Serve(wg *sync.WaitGroup, screenAndServePayload <-chan shared.ConvertedData) { +func (sap *Service) Serve(wg *sync.WaitGroup, screenAndServePayload <-chan eth2.ConvertedPayload) { sap.serveWg = wg go func() { wg.Add(1) @@ -266,17 +157,17 @@ func (sap *Service) Serve(wg *sync.WaitGroup, 
screenAndServePayload <-chan share case payload := <-screenAndServePayload: sap.filterAndServe(payload) case <-sap.QuitChan: - log.Infof("quiting %s Serve process", sap.chain.String()) + log.Info("quiting eth ipld server process") return } } }() - log.Infof("%s Serve goroutine successfully spun up", sap.chain.String()) + log.Info("eth ipld server process successfully spun up") } // filterAndServe filters the payload according to each subscription type and sends to the subscriptions -func (sap *Service) filterAndServe(payload shared.ConvertedData) { - log.Debugf("sending %s payload to subscriptions", sap.chain.String()) +func (sap *Service) filterAndServe(payload eth2.ConvertedPayload) { + log.Debug("sending eth ipld payload to subscriptions") sap.Lock() sap.serveWg.Add(1) defer sap.Unlock() @@ -285,11 +176,11 @@ func (sap *Service) filterAndServe(payload shared.ConvertedData) { // Retrieve the subscription parameters for this subscription type subConfig, ok := sap.SubscriptionTypes[ty] if !ok { - log.Errorf("watcher %s subscription configuration for subscription type %s not available", sap.chain.String(), ty.Hex()) + log.Errorf("eth ipld server subscription configuration for subscription type %s not available", ty.Hex()) sap.closeType(ty) continue } - if subConfig.EndingBlock().Int64() > 0 && subConfig.EndingBlock().Int64() < payload.Height() { + if subConfig.End.Int64() > 0 && subConfig.End.Int64() < payload.Block.Number().Int64() { // We are not out of range for this subscription type // close it, and continue to the next sap.closeType(ty) @@ -297,21 +188,21 @@ func (sap *Service) filterAndServe(payload shared.ConvertedData) { } response, err := sap.Filterer.Filter(subConfig, payload) if err != nil { - log.Errorf("watcher filtering error for chain %s: %v", sap.chain.String(), err) + log.Errorf("eth ipld server filtering error: %v", err) sap.closeType(ty) continue } responseRLP, err := rlp.EncodeToBytes(response) if err != nil { - log.Errorf("watcher rlp encoding error for chain %s: %v", sap.chain.String(), err) + log.Errorf("eth ipld server rlp encoding error: %v", err) continue } for id, sub := range subs { select { - case sub.PayloadChan <- SubscriptionPayload{Data: responseRLP, Err: "", Flag: EmptyFlag, Height: response.Height()}: - log.Debugf("sending watcher %s payload to subscription %s", sap.chain.String(), id) + case sub.PayloadChan <- SubscriptionPayload{Data: responseRLP, Err: "", Flag: EmptyFlag, Height: response.BlockNumber.Int64()}: + log.Debugf("sending eth ipld server payload to subscription %s", id) default: - log.Infof("unable to send %s payload to subscription %s; channel has no receiver", sap.chain.String(), id) + log.Infof("unable to send eth ipld payload to subscription %s; channel has no receiver", id) } } } @@ -319,20 +210,15 @@ func (sap *Service) filterAndServe(payload shared.ConvertedData) { // Subscribe is used by the API to remotely subscribe to the service loop // The params must be rlp serializable and satisfy the SubscriptionSettings() interface -func (sap *Service) Subscribe(id rpc.ID, sub chan<- SubscriptionPayload, quitChan chan<- bool, params shared.SubscriptionSettings) { +func (sap *Service) Subscribe(id rpc.ID, sub chan<- SubscriptionPayload, quitChan chan<- bool, params eth.SubscriptionSettings) { sap.serveWg.Add(1) defer sap.serveWg.Done() - log.Infof("New %s subscription %s", sap.chain.String(), id) + log.Infof("new eth ipld subscription %s", id) subscription := Subscription{ ID: id, PayloadChan: sub, QuitChan: quitChan, } - if params.ChainType() 
!= sap.chain { - sendNonBlockingErr(subscription, fmt.Errorf("subscription %s is for chain %s, service supports chain %s", id, params.ChainType().String(), sap.chain.String())) - sendNonBlockingQuit(subscription) - return - } // Subscription type is defined as the hash of the rlp-serialized subscription settings by, err := rlp.EncodeToBytes(params) if err != nil { @@ -341,7 +227,7 @@ func (sap *Service) Subscribe(id rpc.ID, sub chan<- SubscriptionPayload, quitCha return } subscriptionType := crypto.Keccak256Hash(by) - if !params.HistoricalDataOnly() { + if !params.BackFillOnly { // Add subscriber sap.Lock() if sap.Subscriptions[subscriptionType] == nil { @@ -353,9 +239,9 @@ func (sap *Service) Subscribe(id rpc.ID, sub chan<- SubscriptionPayload, quitCha } // If the subscription requests a backfill, use the Postgres index to lookup and retrieve historical data // Otherwise we only filter new data as it is streamed in from the state diffing geth node - if params.HistoricalData() || params.HistoricalDataOnly() { + if params.BackFill || params.BackFillOnly { if err := sap.sendHistoricalData(subscription, id, params); err != nil { - sendNonBlockingErr(subscription, fmt.Errorf("%s watcher subscriber backfill error: %v", sap.chain.String(), err)) + sendNonBlockingErr(subscription, fmt.Errorf("eth ipld server subscription backfill error: %v", err)) sendNonBlockingQuit(subscription) return } @@ -363,8 +249,8 @@ func (sap *Service) Subscribe(id rpc.ID, sub chan<- SubscriptionPayload, quitCha } // sendHistoricalData sends historical data to the requesting subscription -func (sap *Service) sendHistoricalData(sub Subscription, id rpc.ID, params shared.SubscriptionSettings) error { - log.Infof("Sending %s historical data to subscription %s", sap.chain.String(), id) +func (sap *Service) sendHistoricalData(sub Subscription, id rpc.ID, params eth.SubscriptionSettings) error { + log.Infof("sending eth ipld historical data to subscription %s", id) // Retrieve cached CIDs relevant to this subscriber var endingBlock int64 var startingBlock int64 @@ -373,31 +259,31 @@ func (sap *Service) sendHistoricalData(sub Subscription, id rpc.ID, params share if err != nil { return err } - if startingBlock < params.StartingBlock().Int64() { - startingBlock = params.StartingBlock().Int64() + if startingBlock < params.Start.Int64() { + startingBlock = params.Start.Int64() } endingBlock, err = sap.Retriever.RetrieveLastBlockNumber() if err != nil { return err } - if endingBlock > params.EndingBlock().Int64() && params.EndingBlock().Int64() > 0 && params.EndingBlock().Int64() > startingBlock { - endingBlock = params.EndingBlock().Int64() + if endingBlock > params.End.Int64() && params.End.Int64() > 0 && params.End.Int64() > startingBlock { + endingBlock = params.End.Int64() } - log.Debugf("%s historical data starting block: %d", sap.chain.String(), params.StartingBlock().Int64()) - log.Debugf("%s historical data ending block: %d", sap.chain.String(), endingBlock) + log.Debugf("eth ipld historical data starting block: %d", params.Start.Int64()) + log.Debugf("eth ipld historical data ending block: %d", endingBlock) go func() { sap.serveWg.Add(1) defer sap.serveWg.Done() for i := startingBlock; i <= endingBlock; i++ { select { case <-sap.QuitChan: - log.Infof("%s watcher historical data feed to subscription %s closed", sap.chain.String(), id) + log.Infof("%s watcher historical data feed to subscription %s closed", id) return default: } cidWrappers, empty, err := sap.Retriever.Retrieve(params, i) if err != nil { - 
sendNonBlockingErr(sub, fmt.Errorf(" %s watcher CID Retrieval error at block %d\r%s", sap.chain.String(), i, err.Error())) + sendNonBlockingErr(sub, fmt.Errorf("eth ipld server cid retrieval error at block %d\r%s", i, err.Error())) continue } if empty { @@ -406,7 +292,7 @@ func (sap *Service) sendHistoricalData(sub Subscription, id rpc.ID, params share for _, cids := range cidWrappers { response, err := sap.IPLDFetcher.Fetch(cids) if err != nil { - sendNonBlockingErr(sub, fmt.Errorf("%s watcher IPLD Fetching error at block %d\r%s", sap.chain.String(), i, err.Error())) + sendNonBlockingErr(sub, fmt.Errorf("eth ipld server ipld fetching error at block %d\r%s", i, err.Error())) continue } responseRLP, err := rlp.EncodeToBytes(response) @@ -415,19 +301,19 @@ func (sap *Service) sendHistoricalData(sub Subscription, id rpc.ID, params share continue } select { - case sub.PayloadChan <- SubscriptionPayload{Data: responseRLP, Err: "", Flag: EmptyFlag, Height: response.Height()}: - log.Debugf("sending watcher historical data payload to %s subscription %s", sap.chain.String(), id) + case sub.PayloadChan <- SubscriptionPayload{Data: responseRLP, Err: "", Flag: EmptyFlag, Height: response.BlockNumber.Int64()}: + log.Debugf("eth ipld server sending historical data payload to subscription %s", id) default: - log.Infof("unable to send backFill payload to %s subscription %s; channel has no receiver", sap.chain.String(), id) + log.Infof("eth ipld server unable to send backFill payload to subscription %s; channel has no receiver", id) } } } // when we are done backfilling send an empty payload signifying so in the msg select { case sub.PayloadChan <- SubscriptionPayload{Data: nil, Err: "", Flag: BackFillCompleteFlag}: - log.Debugf("sending backFill completion notice to %s subscription %s", sap.chain.String(), id) + log.Debugf("eth ipld server sending backFill completion notice to subscription %s", id) default: - log.Infof("unable to send backFill completion notice to %s subscription %s", sap.chain.String(), id) + log.Infof("eth ipld server unable to send backFill completion notice to %s subscription %s", id) } }() return nil @@ -435,7 +321,7 @@ func (sap *Service) sendHistoricalData(sub Subscription, id rpc.ID, params share // Unsubscribe is used by the API to remotely unsubscribe to the StateDiffingService loop func (sap *Service) Unsubscribe(id rpc.ID) { - log.Infof("Unsubscribing %s from the %s watcher service", id, sap.chain.String()) + log.Infof("unsubscribing %s from the eth ipld server", id) sap.Lock() for ty := range sap.Subscriptions { delete(sap.Subscriptions[ty], id) @@ -451,12 +337,9 @@ func (sap *Service) Unsubscribe(id rpc.ID) { // Start is used to begin the service // This is mostly just to satisfy the node.Service interface func (sap *Service) Start(*p2p.Server) error { - log.Infof("Starting %s watcher service", sap.chain.String()) + log.Info("starting eth ipld server") wg := new(sync.WaitGroup) - payloadChan := make(chan shared.ConvertedData, PayloadChanBufferSize) - if err := sap.Sync(wg, payloadChan); err != nil { - return err - } + payloadChan := make(chan eth2.ConvertedPayload, PayloadChanBufferSize) sap.Serve(wg, payloadChan) return nil } @@ -464,7 +347,7 @@ func (sap *Service) Start(*p2p.Server) error { // Stop is used to close down the service // This is mostly just to satisfy the node.Service interface func (sap *Service) Stop() error { - log.Infof("Stopping %s watcher service", sap.chain.String()) + log.Infof("stopping eth ipld server") sap.Lock() close(sap.QuitChan) sap.close() @@ 
-473,19 +356,19 @@ func (sap *Service) Stop() error { } // Node returns the node info for this service -func (sap *Service) Node() *node.Node { +func (sap *Service) Node() *node.Info { return sap.NodeInfo } // Chain returns the chain type for this service func (sap *Service) Chain() shared.ChainType { - return sap.chain + return shared.Ethereum } // close is used to close all listening subscriptions // close needs to be called with subscription access locked func (sap *Service) close() { - log.Infof("Closing all %s subscriptions", sap.chain.String()) + log.Infof("closing all eth ipld server subscriptions") for subType, subs := range sap.Subscriptions { for _, sub := range subs { sendNonBlockingQuit(sub) @@ -498,7 +381,7 @@ func (sap *Service) close() { // closeType is used to close all subscriptions of given type // closeType needs to be called with subscription access locked func (sap *Service) closeType(subType common.Hash) { - log.Infof("Closing all %s subscriptions of type %s", sap.chain.String(), subType.String()) + log.Infof("closing all eth ipld server subscriptions of type %s", subType.String()) subs := sap.Subscriptions[subType] for _, sub := range subs { sendNonBlockingQuit(sub) From f4591a6beba28fc374ef7c202e8169bfca70ddeb Mon Sep 17 00:00:00 2001 From: Ian Norden Date: Mon, 31 Aug 2020 10:52:47 -0500 Subject: [PATCH 03/12] update deps/paths/names --- .gitignore | 2 +- README.md | 32 ++++++++++++------------ dockerfiles/migrations/Dockerfile | 10 ++++---- dockerfiles/super_node/Dockerfile | 22 ++++++++-------- dockerfiles/super_node/entrypoint.sh | 6 ++--- dockerfiles/super_node/startup_script.sh | 2 +- scripts/reset_db | 4 +-- test_config/test_config.go | 2 +- utils/utils.go | 4 +-- utils/utils_test.go | 2 +- 10 files changed, 43 insertions(+), 43 deletions(-) diff --git a/.gitignore b/.gitignore index 15004028..1d7fa1f9 100644 --- a/.gitignore +++ b/.gitignore @@ -7,7 +7,7 @@ Vagrantfile vagrant*.sh .vagrant test_scripts/ -ipfs-blockchain-watcher +ipld-eth-server postgraphile/build/ postgraphile/node_modules/ postgraphile/package-lock.json diff --git a/README.md b/README.md index 306ed2a1..607308b2 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,8 @@ -# ipfs-blockchain-watcher +# ipld-eth-server -[![Go Report Card](https://goreportcard.com/badge/github.com/vulcanize/ipfs-blockchain-watcher)](https://goreportcard.com/report/github.com/vulcanize/ipfs-blockchain-watcher) +[![Go Report Card](https://goreportcard.com/badge/github.com/vulcanize/ipld-eth-server)](https://goreportcard.com/report/github.com/vulcanize/ipld-eth-server) -> ipfs-blockchain-watcher is used to extract, transform, and load all eth or btc data into an IPFS-backing Postgres datastore while generating useful secondary indexes around the data in other Postgres tables +> ipld-eth-server is used to extract, transform, and load all eth or btc data into an IPFS-backing Postgres datastore while generating useful secondary indexes around the data in other Postgres tables ## Table of Contents 1. [Background](#background) @@ -13,13 +13,13 @@ 1. [License](#license) ## Background -ipfs-blockchain-watcher is a collection of interfaces that are used to extract, process, store, and index -all blockchain data in Postgres-IPFS. The raw data indexed by ipfs-blockchain-watcher serves as the basis for more specific watchers and applications. +ipld-eth-server is a collection of interfaces that are used to extract, process, store, and index +all blockchain data in Postgres-IPFS. 
The raw data indexed by ipld-eth-server serves as the basis for more specific watchers and applications. Currently the service supports complete processing of all Bitcoin and Ethereum data. ## Architecture -More details on the design of ipfs-blockchain-watcher can be found in [here](./documentation/architecture.md) +More details on the design of ipld-eth-server can be found in [here](./documentation/architecture.md) ## Dependencies Minimal build dependencies @@ -49,7 +49,7 @@ is required for running the automated tests and is used by the `make migrate` co 1. [Install Postgres](https://wiki.postgresql.org/wiki/Detailed_installation_guides) 1. Create a superuser for yourself and make sure `psql --list` works without prompting for a password. 1. `createdb vulcanize_public` -1. `cd $GOPATH/src/github.com/vulcanize/ipfs-blockchain-watcher` +1. `cd $GOPATH/src/github.com/vulcanize/ipld-eth-server` 1. Run the migrations: `make migrate HOST_NAME=localhost NAME=vulcanize_public PORT=5432` - There are optional vars `USER=username:password` if the database user is not the default user `postgres` and/or a password is present - To rollback a single step: `make rollback NAME=vulcanize_public` @@ -68,7 +68,7 @@ Data is stored in an [IPFS-backing Postgres datastore](https://github.com/ipfs/g By default data is written directly to the ipfs blockstore in Postgres; the public.blocks table. In this case no further IPFS configuration is needed at this time. -Optionally, ipfs-blockchain-watcher can be configured to function through an internal ipfs node interface using the flag: `-ipfs-mode=interface`. +Optionally, ipld-eth-server can be configured to function through an internal ipfs node interface using the flag: `-ipfs-mode=interface`. Operating through the ipfs interface provides the option to configure a block exchange that can search remotely for IPLD data found missing in the local datastore. This option is irrelevant in most cases and this mode has some disadvantages, namely: @@ -79,7 +79,7 @@ This option is irrelevant in most cases and this mode has some disadvantages, na More information for configuring Postgres-IPFS can be found [here](./documentation/ipfs.md) ### Blockchain -This section describes how to setup an Ethereum or Bitcoin node to serve as a data source for ipfs-blockchain-watcher +This section describes how to setup an Ethereum or Bitcoin node to serve as a data source for ipld-eth-server #### Ethereum For Ethereum, [a special fork of go-ethereum](https://github.com/vulcanize/go-ethereum/tree/statediff_at_anyblock-1.9.11) is currently *requirde*. @@ -122,7 +122,7 @@ The default ws url is "127.0.0.1:8546" and the default http url is "127.0.0.1:85 These values will be used as the `ethereum.wsPath` and `ethereum.httpPath` in the config, respectively. #### Bitcoin -For Bitcoin, ipfs-blockchain-watcher is able to operate entirely through the universally exposed JSON-RPC interfaces. +For Bitcoin, ipld-eth-server is able to operate entirely through the universally exposed JSON-RPC interfaces. This means any of the standard full nodes can be used (e.g. bitcoind, btcd) as the data source. Point at a remote node or set one up locally using the instructions for [bitcoind](https://github.com/bitcoin/bitcoin) and [btcd](https://github.com/btcsuite/btcd). @@ -133,11 +133,11 @@ The default http url is "127.0.0.1:8332". We will use the http endpoint as both ### Watcher Finally, setup the watcher process itself. 
-Start by downloading ipfs-blockchain-watcher and moving into the repo: +Start by downloading ipld-eth-server and moving into the repo: -`GO111MODULE=off go get -d github.com/vulcanize/ipfs-blockchain-watcher` +`GO111MODULE=off go get -d github.com/vulcanize/ipld-eth-server` -`cd $GOPATH/src/github.com/vulcanize/ipfs-blockchain-watcher` +`cd $GOPATH/src/github.com/vulcanize/ipld-eth-server` Then, build the binary: @@ -146,11 +146,11 @@ Then, build the binary: ## Usage After building the binary, run as -`./ipfs-blockchain-watcher watch --config=` +`./ipld-eth-server watch --config=` ### Configuration -Below is the set of universal config parameters for the ipfs-blockchain-watcher command, in .toml form, with the respective environmental variables commented to the side. +Below is the set of universal config parameters for the ipld-eth-server command, in .toml form, with the respective environmental variables commented to the side. This set of parameters needs to be set no matter the chain type. ```toml @@ -207,7 +207,7 @@ For Ethereum: ``` ### Exposing the data -A number of different APIs for remote access to ipfs-blockchain-watcher data can be exposed, these are discussed in more detail [here](./documentation/apis.md) +A number of different APIs for remote access to ipld-eth-server data can be exposed, these are discussed in more detail [here](./documentation/apis.md) ### Testing `make test` will run the unit tests diff --git a/dockerfiles/migrations/Dockerfile b/dockerfiles/migrations/Dockerfile index dbcea6d0..bce16c9d 100644 --- a/dockerfiles/migrations/Dockerfile +++ b/dockerfiles/migrations/Dockerfile @@ -7,15 +7,15 @@ RUN apk add busybox-extras # this is probably a noob move, but I want apk from alpine for the above but need to avoid Go 1.13 below as this error still occurs https://github.com/ipfs/go-ipfs/issues/6603 FROM golang:1.12.4 as builder -# Get and build ipfs-blockchain-watcher -ADD . /go/src/github.com/vulcanize/ipfs-blockchain-watcher +# Get and build ipld-eth-server +ADD . /go/src/github.com/vulcanize/ipld-eth-server # Build migration tool RUN go get -u -d github.com/pressly/goose/cmd/goose WORKDIR /go/src/github.com/pressly/goose/cmd/goose RUN GCO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -ldflags '-extldflags "-static"' -tags='no_mysql no_sqlite' -o goose . -WORKDIR /go/src/github.com/vulcanize/ipfs-blockchain-watcher +WORKDIR /go/src/github.com/vulcanize/ipld-eth-server # app container FROM alpine @@ -29,12 +29,12 @@ USER $USER # chown first so dir is writable # note: using $USER is merged, but not in the stable release yet -COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/ipfs-blockchain-watcher/dockerfiles/migrations/startup_script.sh . +COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/ipld-eth-server/dockerfiles/migrations/startup_script.sh . 
# keep binaries immutable COPY --from=builder /go/src/github.com/pressly/goose/cmd/goose/goose goose -COPY --from=builder /go/src/github.com/vulcanize/ipfs-blockchain-watcher/db/migrations migrations/vulcanizedb +COPY --from=builder /go/src/github.com/vulcanize/ipld-eth-server/db/migrations migrations/vulcanizedb # XXX dir is already writeable RUN touch vulcanizedb.log CMD ["./startup_script.sh"] \ No newline at end of file diff --git a/dockerfiles/super_node/Dockerfile b/dockerfiles/super_node/Dockerfile index 03c7aded..df9c6f1e 100644 --- a/dockerfiles/super_node/Dockerfile +++ b/dockerfiles/super_node/Dockerfile @@ -4,10 +4,10 @@ RUN apk --update --no-cache add make git g++ linux-headers # DEBUG RUN apk add busybox-extras -# Get and build ipfs-blockchain-watcher -ADD . /go/src/github.com/vulcanize/ipfs-blockchain-watcher -WORKDIR /go/src/github.com/vulcanize/ipfs-blockchain-watcher -RUN GO111MODULE=on GCO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -ldflags '-extldflags "-static"' -o ipfs-blockchain-watcher . +# Get and build ipld-eth-server +ADD . /go/src/github.com/vulcanize/ipld-eth-server +WORKDIR /go/src/github.com/vulcanize/ipld-eth-server +RUN GO111MODULE=on GCO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -ldflags '-extldflags "-static"' -o ipld-eth-server . # Build migration tool WORKDIR / @@ -15,7 +15,7 @@ RUN go get -u -d github.com/pressly/goose/cmd/goose WORKDIR /go/src/github.com/pressly/goose/cmd/goose RUN GCO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -ldflags '-extldflags "-static"' -tags='no_mysql no_sqlite' -o goose . -WORKDIR /go/src/github.com/vulcanize/ipfs-blockchain-watcher +WORKDIR /go/src/github.com/vulcanize/ipld-eth-server # app container FROM alpine @@ -32,16 +32,16 @@ USER $USER # chown first so dir is writable # note: using $USER is merged, but not in the stable release yet -COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/ipfs-blockchain-watcher/$CONFIG_FILE config.toml -COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/ipfs-blockchain-watcher/dockerfiles/super_node/startup_script.sh . -COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/ipfs-blockchain-watcher/dockerfiles/super_node/entrypoint.sh . +COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/ipld-eth-server/$CONFIG_FILE config.toml +COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/ipld-eth-server/dockerfiles/super_node/startup_script.sh . +COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/ipld-eth-server/dockerfiles/super_node/entrypoint.sh . 
# keep binaries immutable -COPY --from=builder /go/src/github.com/vulcanize/ipfs-blockchain-watcher/ipfs-blockchain-watcher ipfs-blockchain-watcher +COPY --from=builder /go/src/github.com/vulcanize/ipld-eth-server/ipld-eth-server ipld-eth-server COPY --from=builder /go/src/github.com/pressly/goose/cmd/goose/goose goose -COPY --from=builder /go/src/github.com/vulcanize/ipfs-blockchain-watcher/db/migrations migrations/vulcanizedb -COPY --from=builder /go/src/github.com/vulcanize/ipfs-blockchain-watcher/environments environments +COPY --from=builder /go/src/github.com/vulcanize/ipld-eth-server/db/migrations migrations/vulcanizedb +COPY --from=builder /go/src/github.com/vulcanize/ipld-eth-server/environments environments EXPOSE $EXPOSE_PORT_1 EXPOSE $EXPOSE_PORT_2 diff --git a/dockerfiles/super_node/entrypoint.sh b/dockerfiles/super_node/entrypoint.sh index ff93bc60..b203311a 100755 --- a/dockerfiles/super_node/entrypoint.sh +++ b/dockerfiles/super_node/entrypoint.sh @@ -35,7 +35,7 @@ echo "Beginning the vulcanizedb process" VDB_CONFIG_FILE=${VDB_CONFIG_FILE:-config.toml} DEFAULT_OPTIONS="--config=$VDB_CONFIG_FILE" VDB_FULL_CL=${VDB_FULL_CL:-$VDB_COMMAND $DEFAULT_OPTIONS} -echo running: ./ipfs-blockchain-watcher $VDB_FULL_CL $@ +echo running: ./ipld-eth-server $VDB_FULL_CL $@ case "$1" in "/bin/sh" ) @@ -49,8 +49,8 @@ if [[ -z "$vdb_args" ]]; then vdb_args="--config=config.toml" fi -echo running: ./ipfs-blockchain-watcher $vdb_args -./ipfs-blockchain-watcher $vdb_args +echo running: ./ipld-eth-server $vdb_args +./ipld-eth-server $vdb_args rv=$? if [ $rv != 0 ]; then diff --git a/dockerfiles/super_node/startup_script.sh b/dockerfiles/super_node/startup_script.sh index 3ff9ca02..326b1baf 100755 --- a/dockerfiles/super_node/startup_script.sh +++ b/dockerfiles/super_node/startup_script.sh @@ -49,7 +49,7 @@ fi # If IPFS initialization was successful if [[ $? -eq 0 ]]; then echo "Running the VulcanizeDB process" - ./ipfs-blockchain-watcher ${VDB_COMMAND} --config=config.toml + ./ipld-eth-server ${VDB_COMMAND} --config=config.toml else echo "Could not initialize IPFS." 
exit 1 diff --git a/scripts/reset_db b/scripts/reset_db index f6fefe3b..0a57f8d3 100755 --- a/scripts/reset_db +++ b/scripts/reset_db @@ -11,9 +11,9 @@ fi db=$1 dir=$(basename "$(pwd)") -if [ $dir != "ipfs-blockchain-watcher" ] +if [ $dir != "ipld-eth-server" ] then - echo "Run me from the ipfs-blockchain-watcher root dir" + echo "Run me from the ipld-eth-server root dir" exit 1 fi diff --git a/test_config/test_config.go b/test_config/test_config.go index 96143b04..7236422e 100644 --- a/test_config/test_config.go +++ b/test_config/test_config.go @@ -34,7 +34,7 @@ func init() { func setTestConfig() { vip := viper.New() vip.SetConfigName("testing") - vip.AddConfigPath("$GOPATH/src/github.com/vulcanize/ipfs-blockchain-watcher/environments/") + vip.AddConfigPath("$GOPATH/src/github.com/vulcanize/ipld-eth-server/environments/") if err := vip.ReadInConfig(); err != nil { logrus.Fatal(err) } diff --git a/utils/utils.go b/utils/utils.go index 73e70708..56771295 100644 --- a/utils/utils.go +++ b/utils/utils.go @@ -22,9 +22,9 @@ import ( "github.com/sirupsen/logrus" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/config" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/node" + "github.com/vulcanize/ipld-eth-server/pkg/node" "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" + "github.com/vulcanize/ipld-eth-server/pkg/shared" ) func LoadPostgres(database config.Database, node node.Node) postgres.DB { diff --git a/utils/utils_test.go b/utils/utils_test.go index b6c865d0..b36ac66c 100644 --- a/utils/utils_test.go +++ b/utils/utils_test.go @@ -20,7 +20,7 @@ import ( . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - "github.com/vulcanize/ipfs-blockchain-watcher/utils" + "github.com/vulcanize/ipld-eth-server/utils" ) var _ = Describe("GetBlockHeightBins", func() { From d645f52e87810d96b3ff5e30e213defaa9084bb5 Mon Sep 17 00:00:00 2001 From: Ian Norden Date: Mon, 31 Aug 2020 10:58:16 -0500 Subject: [PATCH 04/12] watch => serve --- cmd/root.go | 2 +- cmd/serve.go | 122 +++++++++++++++++ cmd/streamEthSubscribe.go | 6 +- cmd/version.go | 10 +- cmd/watch.go | 194 --------------------------- main.go | 2 +- pkg/client/client.go | 4 +- pkg/{watch => serve}/api.go | 20 +-- pkg/{watch => serve}/config.go | 2 +- pkg/{watch => serve}/helpers.go | 2 +- pkg/{watch => serve}/service.go | 12 +- pkg/{watch => serve}/subscription.go | 2 +- 12 files changed, 153 insertions(+), 225 deletions(-) create mode 100644 cmd/serve.go delete mode 100644 cmd/watch.go rename pkg/{watch => serve}/api.go (86%) rename pkg/{watch => serve}/config.go (99%) rename pkg/{watch => serve}/helpers.go (98%) rename pkg/{watch => serve}/service.go (97%) rename pkg/{watch => serve}/subscription.go (99%) diff --git a/cmd/root.go b/cmd/root.go index af6c23e2..fdcd87d2 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -33,7 +33,7 @@ var ( ) var rootCmd = &cobra.Command{ - Use: "ipfs-blockchain-watcher", + Use: "ipld-eth-server", PersistentPreRun: initFuncs, } diff --git a/cmd/serve.go b/cmd/serve.go new file mode 100644 index 00000000..e0ab26e9 --- /dev/null +++ b/cmd/serve.go @@ -0,0 +1,122 @@ +// Copyright © 2020 Vulcanize, Inc +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. 
+// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . + +package cmd + +import ( + "os" + "os/signal" + s "sync" + + "github.com/ethereum/go-ethereum/rpc" + log "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + "github.com/spf13/viper" + + "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" + + "github.com/vulcanize/ipld-eth-server/pkg/serve" + v "github.com/vulcanize/ipld-eth-server/version" +) + +// watchCmd represents the watch command +var watchCmd = &cobra.Command{ + Use: "watch", + Short: "serve chain data from PG-IPFS", + Long: `This command configures a VulcanizeDB ipld-eth-server. + +`, + Run: func(cmd *cobra.Command, args []string) { + subCommand = cmd.CalledAs() + logWithCommand = *log.WithField("SubCommand", subCommand) + watch() + }, +} + +func watch() { + logWithCommand.Infof("running ipld-eth-server version: %s", v.VersionWithMeta) + + var forwardPayloadChan chan eth.ConvertedPayload + wg := new(s.WaitGroup) + logWithCommand.Debug("loading watcher configuration variables") + watcherConfig, err := serve.NewConfig() + if err != nil { + logWithCommand.Fatal(err) + } + logWithCommand.Infof("watcher config: %+v", watcherConfig) + logWithCommand.Debug("initializing new watcher service") + s, err := serve.NewServer(watcherConfig) + if err != nil { + logWithCommand.Fatal(err) + } + + logWithCommand.Info("starting up watcher servers") + forwardPayloadChan = make(chan eth.ConvertedPayload, serve.PayloadChanBufferSize) + s.Serve(wg, forwardPayloadChan) + if err := startServers(s, watcherConfig); err != nil { + logWithCommand.Fatal(err) + } + + + shutdown := make(chan os.Signal) + signal.Notify(shutdown, os.Interrupt) + <-shutdown + s.Stop() + wg.Wait() +} + +func startServers(watcher serve.Server, settings *serve.Config) error { + logWithCommand.Debug("starting up IPC server") + _, _, err := rpc.StartIPCEndpoint(settings.IPCEndpoint, watcher.APIs()) + if err != nil { + return err + } + logWithCommand.Debug("starting up WS server") + _, _, err = rpc.StartWSEndpoint(settings.WSEndpoint, watcher.APIs(), []string{"vdb"}, nil, true) + if err != nil { + return err + } + logWithCommand.Debug("starting up HTTP server") + _, _, err = rpc.StartHTTPEndpoint(settings.HTTPEndpoint, watcher.APIs(), []string{"eth"}, nil, nil, rpc.HTTPTimeouts{}) + return err +} + +func init() { + rootCmd.AddCommand(watchCmd) + + // flags for all config variables + watchCmd.PersistentFlags().String("watcher-ws-path", "", "vdb server ws path") + watchCmd.PersistentFlags().String("watcher-http-path", "", "vdb server http path") + watchCmd.PersistentFlags().String("watcher-ipc-path", "", "vdb server ipc path") + + watchCmd.PersistentFlags().String("eth-ws-path", "", "ws url for ethereum node") + watchCmd.PersistentFlags().String("eth-http-path", "", "http url for ethereum node") + watchCmd.PersistentFlags().String("eth-node-id", "", "eth node id") + watchCmd.PersistentFlags().String("eth-client-name", "", "eth client name") + watchCmd.PersistentFlags().String("eth-genesis-block", "", "eth genesis block hash") + watchCmd.PersistentFlags().String("eth-network-id", "", "eth network id") + + // and their bindings + viper.BindPFlag("watcher.wsPath", 
watchCmd.PersistentFlags().Lookup("watcher-ws-path")) + viper.BindPFlag("watcher.httpPath", watchCmd.PersistentFlags().Lookup("watcher-http-path")) + viper.BindPFlag("watcher.ipcPath", watchCmd.PersistentFlags().Lookup("watcher-ipc-path")) + + viper.BindPFlag("ethereum.wsPath", watchCmd.PersistentFlags().Lookup("eth-ws-path")) + viper.BindPFlag("ethereum.httpPath", watchCmd.PersistentFlags().Lookup("eth-http-path")) + viper.BindPFlag("ethereum.nodeID", watchCmd.PersistentFlags().Lookup("eth-node-id")) + viper.BindPFlag("ethereum.clientName", watchCmd.PersistentFlags().Lookup("eth-client-name")) + viper.BindPFlag("ethereum.genesisBlock", watchCmd.PersistentFlags().Lookup("eth-genesis-block")) + viper.BindPFlag("ethereum.networkID", watchCmd.PersistentFlags().Lookup("eth-network-id")) +} diff --git a/cmd/streamEthSubscribe.go b/cmd/streamEthSubscribe.go index fb7f6f43..4fc692a3 100644 --- a/cmd/streamEthSubscribe.go +++ b/cmd/streamEthSubscribe.go @@ -28,9 +28,9 @@ import ( "github.com/spf13/cobra" "github.com/spf13/viper" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/client" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - w "github.com/vulcanize/ipfs-blockchain-watcher/pkg/watch" + "github.com/vulcanize/ipld-eth-server/pkg/client" + "github.com/vulcanize/ipld-eth-server/pkg/eth" + w "github.com/vulcanize/ipld-eth-server/pkg/serve" ) // streamEthSubscriptionCmd represents the streamEthSubscription command diff --git a/cmd/version.go b/cmd/version.go index dd38a372..5c01ae83 100644 --- a/cmd/version.go +++ b/cmd/version.go @@ -19,20 +19,20 @@ import ( log "github.com/sirupsen/logrus" "github.com/spf13/cobra" - v "github.com/vulcanize/ipfs-blockchain-watcher/version" + v "github.com/vulcanize/ipld-eth-server/version" ) // versionCmd represents the version command var versionCmd = &cobra.Command{ Use: "version", - Short: "Prints the version of ipfs-blockchain-watcher", - Long: `Use this command to fetch the version of ipfs-blockchain-watcher + Short: "Prints the version of ipld-eth-server", + Long: `Use this command to fetch the version of ipld-eth-server -Usage: ./ipfs-blockchain-watcher version`, +Usage: ./ipld-eth-server version`, Run: func(cmd *cobra.Command, args []string) { subCommand = cmd.CalledAs() logWithCommand = *log.WithField("SubCommand", subCommand) - logWithCommand.Infof("ipfs-blockchain-watcher version: %s", v.VersionWithMeta) + logWithCommand.Infof("ipld-eth-server version: %s", v.VersionWithMeta) }, } diff --git a/cmd/watch.go b/cmd/watch.go deleted file mode 100644 index de6ea570..00000000 --- a/cmd/watch.go +++ /dev/null @@ -1,194 +0,0 @@ -// Copyright © 2020 Vulcanize, Inc -// -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. -// -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package cmd - -import ( - "os" - "os/signal" - s "sync" - - "github.com/ethereum/go-ethereum/rpc" - log "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - "github.com/spf13/viper" - - h "github.com/vulcanize/ipfs-blockchain-watcher/pkg/historical" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/shared" - w "github.com/vulcanize/ipfs-blockchain-watcher/pkg/watch" - v "github.com/vulcanize/ipfs-blockchain-watcher/version" -) - -// watchCmd represents the watch command -var watchCmd = &cobra.Command{ - Use: "watch", - Short: "sync chain data into PG-IPFS", - Long: `This command configures a VulcanizeDB ipfs-blockchain-watcher. - -The Sync process streams all chain data from the appropriate chain, processes this data into IPLD objects -and publishes them to IPFS. It then indexes the CIDs against useful data fields/metadata in Postgres. - -The Serve process creates and exposes a rpc subscription server over ws and ipc. Transformers can subscribe to -these endpoints to stream - -The BackFill process spins up a background process which periodically probes the Postgres database to identify -and fill in gaps in the data -`, - Run: func(cmd *cobra.Command, args []string) { - subCommand = cmd.CalledAs() - logWithCommand = *log.WithField("SubCommand", subCommand) - watch() - }, -} - -func watch() { - logWithCommand.Infof("running ipfs-blockchain-watcher version: %s", v.VersionWithMeta) - - var forwardPayloadChan chan shared.ConvertedData - wg := new(s.WaitGroup) - logWithCommand.Debug("loading watcher configuration variables") - watcherConfig, err := w.NewConfig() - if err != nil { - logWithCommand.Fatal(err) - } - logWithCommand.Infof("watcher config: %+v", watcherConfig) - logWithCommand.Debug("initializing new watcher service") - watcher, err := w.NewWatcher(watcherConfig) - if err != nil { - logWithCommand.Fatal(err) - } - - if watcherConfig.Serve { - logWithCommand.Info("starting up watcher servers") - forwardPayloadChan = make(chan shared.ConvertedData, w.PayloadChanBufferSize) - watcher.Serve(wg, forwardPayloadChan) - if err := startServers(watcher, watcherConfig); err != nil { - logWithCommand.Fatal(err) - } - } - - if watcherConfig.Sync { - logWithCommand.Info("starting up watcher sync process") - if err := watcher.Sync(wg, forwardPayloadChan); err != nil { - logWithCommand.Fatal(err) - } - } - - var backFiller h.BackFillInterface - if watcherConfig.Historical { - historicalConfig, err := h.NewConfig() - if err != nil { - logWithCommand.Fatal(err) - } - logWithCommand.Debug("initializing new historical backfill service") - backFiller, err = h.NewBackFillService(historicalConfig, forwardPayloadChan) - if err != nil { - logWithCommand.Fatal(err) - } - logWithCommand.Info("starting up watcher backfill process") - backFiller.BackFill(wg) - } - - shutdown := make(chan os.Signal) - signal.Notify(shutdown, os.Interrupt) - <-shutdown - if watcherConfig.Historical { - backFiller.Stop() - } - watcher.Stop() - wg.Wait() -} - -func startServers(watcher w.Watcher, settings *w.Config) error { - logWithCommand.Debug("starting up IPC server") - _, _, err := rpc.StartIPCEndpoint(settings.IPCEndpoint, watcher.APIs()) - if err != nil { - return err - } - logWithCommand.Debug("starting up WS server") - _, _, err = rpc.StartWSEndpoint(settings.WSEndpoint, watcher.APIs(), []string{"vdb"}, nil, true) - if err != nil { - return err - } - logWithCommand.Debug("starting up HTTP server") - _, _, err = rpc.StartHTTPEndpoint(settings.HTTPEndpoint, watcher.APIs(), []string{settings.Chain.API()}, nil, nil, 
rpc.HTTPTimeouts{}) - return err -} - -func init() { - rootCmd.AddCommand(watchCmd) - - // flags for all config variables - watchCmd.PersistentFlags().String("watcher-chain", "", "which chain to support, options are currently Ethereum or Bitcoin.") - watchCmd.PersistentFlags().Bool("watcher-server", false, "turn vdb server on or off") - watchCmd.PersistentFlags().String("watcher-ws-path", "", "vdb server ws path") - watchCmd.PersistentFlags().String("watcher-http-path", "", "vdb server http path") - watchCmd.PersistentFlags().String("watcher-ipc-path", "", "vdb server ipc path") - watchCmd.PersistentFlags().Bool("watcher-sync", false, "turn vdb sync on or off") - watchCmd.PersistentFlags().Int("watcher-workers", 0, "how many worker goroutines to publish and index data") - watchCmd.PersistentFlags().Bool("watcher-back-fill", false, "turn vdb backfill on or off") - watchCmd.PersistentFlags().Int("watcher-frequency", 0, "how often (in seconds) the backfill process checks for gaps") - watchCmd.PersistentFlags().Int("watcher-batch-size", 0, "data fetching batch size") - watchCmd.PersistentFlags().Int("watcher-batch-number", 0, "how many goroutines to fetch data concurrently") - watchCmd.PersistentFlags().Int("watcher-validation-level", 0, "backfill will resync any data below this level") - watchCmd.PersistentFlags().Int("watcher-timeout", 0, "timeout used for backfill http requests") - - watchCmd.PersistentFlags().String("btc-ws-path", "", "ws url for bitcoin node") - watchCmd.PersistentFlags().String("btc-http-path", "", "http url for bitcoin node") - watchCmd.PersistentFlags().String("btc-password", "", "password for btc node") - watchCmd.PersistentFlags().String("btc-username", "", "username for btc node") - watchCmd.PersistentFlags().String("btc-node-id", "", "btc node id") - watchCmd.PersistentFlags().String("btc-client-name", "", "btc client name") - watchCmd.PersistentFlags().String("btc-genesis-block", "", "btc genesis block hash") - watchCmd.PersistentFlags().String("btc-network-id", "", "btc network id") - - watchCmd.PersistentFlags().String("eth-ws-path", "", "ws url for ethereum node") - watchCmd.PersistentFlags().String("eth-http-path", "", "http url for ethereum node") - watchCmd.PersistentFlags().String("eth-node-id", "", "eth node id") - watchCmd.PersistentFlags().String("eth-client-name", "", "eth client name") - watchCmd.PersistentFlags().String("eth-genesis-block", "", "eth genesis block hash") - watchCmd.PersistentFlags().String("eth-network-id", "", "eth network id") - - // and their bindings - viper.BindPFlag("watcher.chain", watchCmd.PersistentFlags().Lookup("watcher-chain")) - viper.BindPFlag("watcher.server", watchCmd.PersistentFlags().Lookup("watcher-server")) - viper.BindPFlag("watcher.wsPath", watchCmd.PersistentFlags().Lookup("watcher-ws-path")) - viper.BindPFlag("watcher.httpPath", watchCmd.PersistentFlags().Lookup("watcher-http-path")) - viper.BindPFlag("watcher.ipcPath", watchCmd.PersistentFlags().Lookup("watcher-ipc-path")) - viper.BindPFlag("watcher.sync", watchCmd.PersistentFlags().Lookup("watcher-sync")) - viper.BindPFlag("watcher.workers", watchCmd.PersistentFlags().Lookup("watcher-workers")) - viper.BindPFlag("watcher.backFill", watchCmd.PersistentFlags().Lookup("watcher-back-fill")) - viper.BindPFlag("watcher.frequency", watchCmd.PersistentFlags().Lookup("watcher-frequency")) - viper.BindPFlag("watcher.batchSize", watchCmd.PersistentFlags().Lookup("watcher-batch-size")) - viper.BindPFlag("watcher.batchNumber", 
watchCmd.PersistentFlags().Lookup("watcher-batch-number")) - viper.BindPFlag("watcher.validationLevel", watchCmd.PersistentFlags().Lookup("watcher-validation-level")) - viper.BindPFlag("watcher.timeout", watchCmd.PersistentFlags().Lookup("watcher-timeout")) - - viper.BindPFlag("bitcoin.wsPath", watchCmd.PersistentFlags().Lookup("btc-ws-path")) - viper.BindPFlag("bitcoin.httpPath", watchCmd.PersistentFlags().Lookup("btc-http-path")) - viper.BindPFlag("bitcoin.pass", watchCmd.PersistentFlags().Lookup("btc-password")) - viper.BindPFlag("bitcoin.user", watchCmd.PersistentFlags().Lookup("btc-username")) - viper.BindPFlag("bitcoin.nodeID", watchCmd.PersistentFlags().Lookup("btc-node-id")) - viper.BindPFlag("bitcoin.clientName", watchCmd.PersistentFlags().Lookup("btc-client-name")) - viper.BindPFlag("bitcoin.genesisBlock", watchCmd.PersistentFlags().Lookup("btc-genesis-block")) - viper.BindPFlag("bitcoin.networkID", watchCmd.PersistentFlags().Lookup("btc-network-id")) - - viper.BindPFlag("ethereum.wsPath", watchCmd.PersistentFlags().Lookup("eth-ws-path")) - viper.BindPFlag("ethereum.httpPath", watchCmd.PersistentFlags().Lookup("eth-http-path")) - viper.BindPFlag("ethereum.nodeID", watchCmd.PersistentFlags().Lookup("eth-node-id")) - viper.BindPFlag("ethereum.clientName", watchCmd.PersistentFlags().Lookup("eth-client-name")) - viper.BindPFlag("ethereum.genesisBlock", watchCmd.PersistentFlags().Lookup("eth-genesis-block")) - viper.BindPFlag("ethereum.networkID", watchCmd.PersistentFlags().Lookup("eth-network-id")) -} diff --git a/main.go b/main.go index af533f77..6bc9b782 100644 --- a/main.go +++ b/main.go @@ -18,7 +18,7 @@ package main import ( "github.com/sirupsen/logrus" - "github.com/vulcanize/ipfs-blockchain-watcher/cmd" + "github.com/vulcanize/ipld-eth-server/cmd" ) func main() { diff --git a/pkg/client/client.go b/pkg/client/client.go index 590e0166..3ffb296f 100644 --- a/pkg/client/client.go +++ b/pkg/client/client.go @@ -22,7 +22,7 @@ import ( "github.com/ethereum/go-ethereum/rpc" - "github.com/vulcanize/ipld-eth-server/pkg/watch" + "github.com/vulcanize/ipld-eth-server/pkg/serve" ) // Client is used to subscribe to the ipld-eth-server ipld data stream @@ -38,6 +38,6 @@ func NewClient(c *rpc.Client) *Client { } // Stream is the main loop for subscribing to iplds from an ipld-eth-server server -func (c *Client) Stream(payloadChan chan watch.SubscriptionPayload, rlpParams []byte) (*rpc.ClientSubscription, error) { +func (c *Client) Stream(payloadChan chan serve.SubscriptionPayload, rlpParams []byte) (*rpc.ClientSubscription, error) { return c.c.Subscribe(context.Background(), "vdb", payloadChan, "stream", rlpParams) } diff --git a/pkg/watch/api.go b/pkg/serve/api.go similarity index 86% rename from pkg/watch/api.go rename to pkg/serve/api.go index fd95b79d..0e2aaa4d 100644 --- a/pkg/watch/api.go +++ b/pkg/serve/api.go @@ -14,7 +14,7 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see . 
-package watch +package serve import ( "context" @@ -36,20 +36,20 @@ const APIName = "vdb" // APIVersion is the version of the state diffing service API const APIVersion = "0.0.1" -// PublicWatcherAPI is the public api for the watcher -type PublicWatcherAPI struct { - w Watcher +// PublicServerAPI is the public api for the watcher +type PublicServerAPI struct { + w Server } -// NewPublicWatcherAPI creates a new PublicWatcherAPI with the provided underlying Watcher process -func NewPublicWatcherAPI(w Watcher) *PublicWatcherAPI { - return &PublicWatcherAPI{ +// NewPublicServerAPI creates a new PublicServerAPI with the provided underlying Server process +func NewPublicServerAPI(w Server) *PublicServerAPI { + return &PublicServerAPI{ w: w, } } // Stream is the public method to setup a subscription that fires off IPLD payloads as they are processed -func (api *PublicWatcherAPI) Stream(ctx context.Context, params eth.SubscriptionSettings) (*rpc.Subscription, error) { +func (api *PublicServerAPI) Stream(ctx context.Context, params eth.SubscriptionSettings) (*rpc.Subscription, error) { // ensure that the RPC connection supports subscriptions notifier, supported := rpc.NotifierFromContext(ctx) if !supported { @@ -89,12 +89,12 @@ func (api *PublicWatcherAPI) Stream(ctx context.Context, params eth.Subscription // Node is a public rpc method to allow transformers to fetch the node info for the watcher // NOTE: this is the node info for the node that the watcher is syncing from, not the node info for the watcher itself -func (api *PublicWatcherAPI) Node() *node.Info { +func (api *PublicServerAPI) Node() *node.Info { return api.w.Node() } // Chain returns the chain type that this watcher instance supports -func (api *PublicWatcherAPI) Chain() shared.ChainType { +func (api *PublicServerAPI) Chain() shared.ChainType { return api.w.Chain() } diff --git a/pkg/watch/config.go b/pkg/serve/config.go similarity index 99% rename from pkg/watch/config.go rename to pkg/serve/config.go index 45880186..5a525c30 100644 --- a/pkg/watch/config.go +++ b/pkg/serve/config.go @@ -14,7 +14,7 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see . -package watch +package serve import ( "os" diff --git a/pkg/watch/helpers.go b/pkg/serve/helpers.go similarity index 98% rename from pkg/watch/helpers.go rename to pkg/serve/helpers.go index 1e13c18a..a4571197 100644 --- a/pkg/watch/helpers.go +++ b/pkg/serve/helpers.go @@ -14,7 +14,7 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see . -package watch +package serve import log "github.com/sirupsen/logrus" diff --git a/pkg/watch/service.go b/pkg/serve/service.go similarity index 97% rename from pkg/watch/service.go rename to pkg/serve/service.go index b3ce55bf..adaf027e 100644 --- a/pkg/watch/service.go +++ b/pkg/serve/service.go @@ -14,7 +14,7 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see . 
-package watch +package serve import ( "fmt" @@ -40,10 +40,10 @@ const ( PayloadChanBufferSize = 2000 ) -// Watcher is the top level interface for streaming, converting to IPLDs, publishing, +// Server is the top level interface for streaming, converting to IPLDs, publishing, // and indexing all chain data; screening this data; and serving it up to subscribed clients // This service is compatible with the Ethereum service interface (node.Service) -type Watcher interface { +type Server interface { // APIs(), Protocols(), Start() and Stop() ethnode.Service // Pub-Sub handling event loop @@ -82,8 +82,8 @@ type Service struct { serveWg *sync.WaitGroup } -// NewWatcher creates a new Watcher using an underlying Service struct -func NewWatcher(settings *Config) (Watcher, error) { +// NewServer creates a new Server using an underlying Service struct +func NewServer(settings *Config) (Server, error) { sn := new(Service) sn.Retriever = eth.NewCIDRetriever(settings.DB) sn.IPLDFetcher = eth.NewIPLDFetcher(settings.DB) @@ -108,7 +108,7 @@ func (sap *Service) APIs() []rpc.API { { Namespace: APIName, Version: APIVersion, - Service: NewPublicWatcherAPI(sap), + Service: NewPublicServerAPI(sap), Public: true, }, { diff --git a/pkg/watch/subscription.go b/pkg/serve/subscription.go similarity index 99% rename from pkg/watch/subscription.go rename to pkg/serve/subscription.go index 1b3474c7..41383590 100644 --- a/pkg/watch/subscription.go +++ b/pkg/serve/subscription.go @@ -14,7 +14,7 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see . -package watch +package serve import ( "errors" From f4b7b92dd8aca95ab1714cf4bcabd90412224c17 Mon Sep 17 00:00:00 2001 From: Ian Norden Date: Mon, 31 Aug 2020 10:58:56 -0500 Subject: [PATCH 05/12] bump minor version --- version/version.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/version/version.go b/version/version.go index b36e5f4a..ae3220e0 100644 --- a/version/version.go +++ b/version/version.go @@ -20,8 +20,8 @@ import "fmt" const ( Major = 0 // Major version component of the current release - Minor = 1 // Minor version component of the current release - Patch = 2 // Patch version component of the current release + Minor = 2 // Minor version component of the current release + Patch = 0 // Patch version component of the current release Meta = "alpha" // Version metadata to append to the version string ) From 0d28234804b223bfed4755ecc077e1777e55e6b4 Mon Sep 17 00:00:00 2001 From: Ian Norden Date: Mon, 31 Aug 2020 10:59:15 -0500 Subject: [PATCH 06/12] continue refactor/purge --- cmd/{streamEthSubscribe.go => subscribe.go} | 16 +- documentation/apis.md | 80 +++++----- documentation/ipfs.md | 53 ------ environments/example.toml | 14 ++ environments/superNodeETH.toml | 49 ------ environments/superNodeSubscription.toml | 30 ---- go.mod | 2 +- go.sum | 1 + pkg/builders/builders.go | 168 -------------------- pkg/eth/cid_retriever.go | 52 ------ pkg/eth/mocks/batch_client.go | 86 ---------- pkg/eth/mocks/converter.go | 66 -------- pkg/eth/mocks/indexer.go | 41 ----- pkg/eth/mocks/publisher.go | 61 ------- pkg/eth/mocks/stream_client.go | 44 ----- pkg/serve/config.go | 10 +- pkg/serve/service.go | 9 -- pkg/shared/mocks/payload_fetcher.go | 50 ------ pkg/shared/mocks/retriever.go | 64 -------- pkg/shared/mocks/streamer.go | 43 ----- test_config/test_config.go | 6 +- utils/utilities_suite_test.go | 36 ----- utils/utils.go | 89 ----------- utils/utils_test.go | 74 --------- 24 files changed, 
69 insertions(+), 1075 deletions(-) rename cmd/{streamEthSubscribe.go => subscribe.go} (95%) delete mode 100644 documentation/ipfs.md create mode 100644 environments/example.toml delete mode 100644 environments/superNodeETH.toml delete mode 100644 environments/superNodeSubscription.toml delete mode 100644 pkg/builders/builders.go delete mode 100644 pkg/eth/mocks/batch_client.go delete mode 100644 pkg/eth/mocks/converter.go delete mode 100644 pkg/eth/mocks/indexer.go delete mode 100644 pkg/eth/mocks/publisher.go delete mode 100644 pkg/eth/mocks/stream_client.go delete mode 100644 pkg/shared/mocks/payload_fetcher.go delete mode 100644 pkg/shared/mocks/retriever.go delete mode 100644 pkg/shared/mocks/streamer.go delete mode 100644 utils/utilities_suite_test.go delete mode 100644 utils/utils.go delete mode 100644 utils/utils_test.go diff --git a/cmd/streamEthSubscribe.go b/cmd/subscribe.go similarity index 95% rename from cmd/streamEthSubscribe.go rename to cmd/subscribe.go index 4fc692a3..abe5e93d 100644 --- a/cmd/streamEthSubscribe.go +++ b/cmd/subscribe.go @@ -28,29 +28,31 @@ import ( "github.com/spf13/cobra" "github.com/spf13/viper" + eth2 "github.com/vulcanize/ipld-eth-indexer/pkg/eth" + "github.com/vulcanize/ipld-eth-server/pkg/client" "github.com/vulcanize/ipld-eth-server/pkg/eth" w "github.com/vulcanize/ipld-eth-server/pkg/serve" ) -// streamEthSubscriptionCmd represents the streamEthSubscription command -var streamEthSubscriptionCmd = &cobra.Command{ - Use: "streamEthSubscription", +// subscribeCmd represents the subscribe command +var subscribeCmd = &cobra.Command{ + Use: "subscribe", Short: "This command is used to subscribe to the eth ipfs watcher data stream with the provided filters", Long: `This command is for demo and testing purposes and is used to subscribe to the watcher with the provided subscription configuration parameters. It does not do anything with the data streamed from the watcher other than unpack it and print it out for demonstration purposes.`, Run: func(cmd *cobra.Command, args []string) { subCommand = cmd.CalledAs() logWithCommand = *log.WithField("SubCommand", subCommand) - streamEthSubscription() + subscribe() }, } func init() { - rootCmd.AddCommand(streamEthSubscriptionCmd) + rootCmd.AddCommand(subscribeCmd) } -func streamEthSubscription() { +func subscribe() { // Prep the subscription config/filters to be sent to the server ethSubConfig, err := eth.NewEthSubscriptionConfig() if err != nil { @@ -85,7 +87,7 @@ func streamEthSubscription() { logWithCommand.Error(payload.Err) continue } - var ethData eth.IPLDs + var ethData eth2.IPLDs if err := rlp.DecodeBytes(payload.Data, ðData); err != nil { logWithCommand.Error(err) continue diff --git a/documentation/apis.md b/documentation/apis.md index 6b03ac82..09ccdd81 100644 --- a/documentation/apis.md +++ b/documentation/apis.md @@ -1,5 +1,5 @@ -## ipfs-blockchain-watcher APIs -We can expose a number of different APIs for remote access to ipfs-blockchain-watcher data +## ipld-eth-server APIs +We can expose a number of different APIs for remote access to ipld-eth-server data ### Table of Contents @@ -9,7 +9,7 @@ We can expose a number of different APIs for remote access to ipfs-blockchain-wa ### Postgraphile -ipfs-blockchain-watcher stores all processed data in Postgres using PG-IPFS, this includes all of the IPLD objects. +ipld-eth-server stores all processed data in Postgres using PG-IPFS, this includes all of the IPLD objects. 
[Postgraphile](https://www.graphile.org/postgraphile/) can be used to expose GraphQL endpoints for the Postgres tables. e.g. @@ -22,15 +22,15 @@ All of their data can then be queried with standard [GraphQL](https://graphql.or ### RPC Subscription Interface -A direct, real-time subscription to the data being processed by ipfs-blockchain-watcher can be established over WS or IPC through the [Stream](../pkg/watch/api.go#L53) RPC method. +A direct, real-time subscription to the data being processed by ipld-eth-server can be established over WS or IPC through the [Stream](../pkg/serve/api.go#L53) RPC method. This method is not chain-specific and each chain-type supports it, it is accessed under the "vdb" namespace rather than a chain-specific namespace. An interface for subscribing to this endpoint is provided [here](../pkg/client/client.go). When subscribing to this endpoint, the subscriber provides a set of RLP-encoded subscription parameters. These parameters will be chain-specific, and are used -by ipfs-blockchain-watcher to filter and return a requested subset of chain data to the subscriber. (e.g. [BTC](../pkg/btc/subscription_config.go), [ETH](../../pkg/eth/subscription_config.go)). +by ipld-eth-server to filter and return a requested subset of chain data to the subscriber. (e.g. [BTC](../pkg/btc/subscription_config.go), [ETH](../../pkg/eth/subscription_config.go)). #### Ethereum RPC Subscription -An example of how to subscribe to a real-time Ethereum data feed from ipfs-blockchain-watcher using the `Stream` RPC method is provided below +An example of how to subscribe to a real-time Ethereum data feed from ipld-eth-server using the `Stream` RPC method is provided below ```go package main @@ -40,9 +40,9 @@ An example of how to subscribe to a real-time Ethereum data feed from ipfs-block "github.com/ethereum/go-ethereum/rpc" "github.com/spf13/viper" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/client" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/watch" + "github.com/vulcanize/ipld-eth-server/pkg/client" + "github.com/vulcanize/ipld-eth-server/pkg/eth" + "github.com/vulcanize/ipld-eth-server/pkg/watch" ) config, _ := eth.NewEthSubscriptionConfig() @@ -101,10 +101,10 @@ These configuration parameters are broken down as follows: `ethSubscription.wsPath` is used to define the watcher ws url OR ipc endpoint to subscribe to -`ethSubscription.historicalData` specifies whether or not ipfs-blockchain-watcher should look up historical data in its cache and +`ethSubscription.historicalData` specifies whether or not ipld-eth-server should look up historical data in its cache and send that to the subscriber, if this is set to `false` then only newly synced/incoming data is streamed -`ethSubscription.historicalDataOnly` will tell ipfs-blockchain-watcher to only send historical data with the specified range and +`ethSubscription.historicalDataOnly` will tell ipld-eth-server to only send historical data with the specified range and not stream forward syncing data `ethSubscription.startingBlock` is the starting block number for the range to receive data in @@ -114,43 +114,43 @@ setting to 0 means the process will continue streaming indefinitely. `ethSubscription.headerFilter` has two sub-options: `off` and `uncles`. -- Setting `off` to true tells ipfs-blockchain-watcher to not send any headers to the subscriber -- setting `uncles` to true tells ipfs-blockchain-watcher to send uncles in addition to normal headers. 
+- Setting `off` to true tells ipld-eth-server to not send any headers to the subscriber +- setting `uncles` to true tells ipld-eth-server to send uncles in addition to normal headers. `ethSubscription.txFilter` has three sub-options: `off`, `src`, and `dst`. -- Setting `off` to true tells ipfs-blockchain-watcher to not send any transactions to the subscriber +- Setting `off` to true tells ipld-eth-server to not send any transactions to the subscriber - `src` and `dst` are string arrays which can be filled with ETH addresses to filter transactions for, -if they have any addresses then ipfs-blockchain-watcher will only send transactions that were sent or received by the addresses contained +if they have any addresses then ipld-eth-server will only send transactions that were sent or received by the addresses contained in `src` and `dst`, respectively. `ethSubscription.receiptFilter` has four sub-options: `off`, `topics`, `contracts` and `matchTxs`. -- Setting `off` to true tells ipfs-blockchain-watcher to not send any receipts to the subscriber +- Setting `off` to true tells ipld-eth-server to not send any receipts to the subscriber - `topic0s` is a string array which can be filled with event topics to filter for, -if it has any topics then ipfs-blockchain-watcher will only send receipts that contain logs which have that topic0. +if it has any topics then ipld-eth-server will only send receipts that contain logs which have that topic0. - `contracts` is a string array which can be filled with contract addresses to filter for, if it contains any contract addresses the watcher will only send receipts that correspond to one of those contracts. - `matchTrxs` is a bool which when set to true any receipts that correspond to filtered for transactions will be sent by the watcher, regardless of whether or not the receipt satisfies the `topics` or `contracts` filters. `ethSubscription.stateFilter` has three sub-options: `off`, `addresses`, and `intermediateNodes`. -- Setting `off` to true tells ipfs-blockchain-watcher to not send any state data to the subscriber +- Setting `off` to true tells ipld-eth-server to not send any state data to the subscriber - `addresses` is a string array which can be filled with ETH addresses to filter state for, -if it has any addresses then ipfs-blockchain-watcher will only send state leafs (accounts) corresponding to those account addresses. -- By default ipfs-blockchain-watcher only sends along state leafs, to receive branch and extension nodes as well `intermediateNodes` can be set to `true`. +if it has any addresses then ipld-eth-server will only send state leafs (accounts) corresponding to those account addresses. +- By default ipld-eth-server only sends along state leafs, to receive branch and extension nodes as well `intermediateNodes` can be set to `true`. `ethSubscription.storageFilter` has four sub-options: `off`, `addresses`, `storageKeys`, and `intermediateNodes`. -- Setting `off` to true tells ipfs-blockchain-watcher to not send any storage data to the subscriber +- Setting `off` to true tells ipld-eth-server to not send any storage data to the subscriber - `addresses` is a string array which can be filled with ETH addresses to filter storage for, -if it has any addresses then ipfs-blockchain-watcher will only send storage nodes from the storage tries at those state addresses. +if it has any addresses then ipld-eth-server will only send storage nodes from the storage tries at those state addresses. 
- `storageKeys` is another string array that can be filled with storage keys to filter storage data for. It is important to note that the storage keys need to be the actual keccak256 hashes, whereas the addresses in the `addresses` fields are pre-hashed ETH addresses. -- By default ipfs-blockchain-watcher only sends along storage leafs, to receive branch and extension nodes as well `intermediateNodes` can be set to `true`. +- By default ipld-eth-server only sends along storage leafs, to receive branch and extension nodes as well `intermediateNodes` can be set to `true`. ### Bitcoin RPC Subscription: -An example of how to subscribe to a real-time Bitcoin data feed from ipfs-blockchain-watcher using the `Stream` RPC method is provided below +An example of how to subscribe to a real-time Bitcoin data feed from ipld-eth-server using the `Stream` RPC method is provided below ```go package main @@ -160,9 +160,9 @@ An example of how to subscribe to a real-time Bitcoin data feed from ipfs-blockc "github.com/ethereum/go-ethereum/rpc" "github.com/spf13/viper" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/btc" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/client" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/watch" + "github.com/vulcanize/ipld-eth-server/pkg/btc" + "github.com/vulcanize/ipld-eth-server/pkg/client" + "github.com/vulcanize/ipld-eth-server/pkg/watch" ) config, _ := btc.NewBtcSubscriptionConfig() @@ -206,12 +206,12 @@ The .toml file being used to fill the Bitcoin subscription config would look som These configuration parameters are broken down as follows: -`btcSubscription.wsPath` is used to define the ipfs-blockchain-watcher ws url OR ipc endpoint to subscribe to +`btcSubscription.wsPath` is used to define the ipld-eth-server ws url OR ipc endpoint to subscribe to -`btcSubscription.historicalData` specifies whether or not ipfs-blockchain-watcher should look up historical data in its cache and -send that to the subscriber, if this is set to `false` then ipfs-blockchain-watcher only streams newly synced/incoming data +`btcSubscription.historicalData` specifies whether or not ipld-eth-server should look up historical data in its cache and +send that to the subscriber, if this is set to `false` then ipld-eth-server only streams newly synced/incoming data -`btcSubscription.historicalDataOnly` will tell ipfs-blockchain-watcher to only send historical data with the specified range and +`btcSubscription.historicalDataOnly` will tell ipld-eth-server to only send historical data with the specified range and not stream forward syncing data `btcSubscription.startingBlock` is the starting block number for the range to receive data in @@ -221,20 +221,20 @@ setting to 0 means the process will continue streaming indefinitely. `btcSubscription.headerFilter` has one sub-option: `off`. -- Setting `off` to true tells ipfs-blockchain-watcher to +- Setting `off` to true tells ipld-eth-server to not send any headers to the subscriber. - Additional header-filtering options will be added in the future. `btcSubscription.txFilter` has seven sub-options: `off`, `segwit`, `witnessHashes`, `indexes`, `pkScriptClass`, `multiSig`, and `addresses`. -- Setting `off` to true tells ipfs-blockchain-watcher to not send any transactions to the subscriber. -- Setting `segwit` to true tells ipfs-blockchain-watcher to only send segwit transactions. 
-- `witnessHashes` is a string array that can be filled with witness hash string; if it contains any hashes ipfs-blockchain-watcher will only send transactions that contain one of those hashes. -- `indexes` is an int64 array that can be filled with tx index numbers; if it contains any integers ipfs-blockchain-watcher will only send transactions at those indexes (e.g. `[0]` will send only coinbase transactions) -- `pkScriptClass` is an uint8 array that can be filled with pk script class numbers; if it contains any integers ipfs-blockchain-watcher will only send transactions that have at least one tx output with one of the specified pkscript classes; +- Setting `off` to true tells ipld-eth-server to not send any transactions to the subscriber. +- Setting `segwit` to true tells ipld-eth-server to only send segwit transactions. +- `witnessHashes` is a string array that can be filled with witness hash string; if it contains any hashes ipld-eth-server will only send transactions that contain one of those hashes. +- `indexes` is an int64 array that can be filled with tx index numbers; if it contains any integers ipld-eth-server will only send transactions at those indexes (e.g. `[0]` will send only coinbase transactions) +- `pkScriptClass` is an uint8 array that can be filled with pk script class numbers; if it contains any integers ipld-eth-server will only send transactions that have at least one tx output with one of the specified pkscript classes; possible class types are 0 through 8 as defined [here](https://github.com/btcsuite/btcd/blob/master/txscript/standard.go#L52). -- Setting `multisig` to true tells ipfs-blockchain-watcher to send only multi-sig transactions- to send only transaction that have at least one tx output that requires more than one signature to spend. -- `addresses` is a string array that can be filled with btc address strings; if it contains any addresses ipfs-blockchain-watcher will only send transactions that have at least one tx output with at least one of the provided addresses. +- Setting `multisig` to true tells ipld-eth-server to send only multi-sig transactions- to send only transaction that have at least one tx output that requires more than one signature to spend. +- `addresses` is a string array that can be filled with btc address strings; if it contains any addresses ipld-eth-server will only send transactions that have at least one tx output with at least one of the provided addresses. ### Native API Recapitulation: @@ -242,7 +242,7 @@ In addition to providing novel Postgraphile and RPC-Subscription endpoints, we a standard chain APIs. This will allow direct compatibility with software that already makes use of the standard interfaces. #### Ethereum JSON-RPC API -ipfs-blockchain-watcher currently faithfully recapitulates portions of the Ethereum JSON-RPC api standard. +ipld-eth-server currently faithfully recapitulates portions of the Ethereum JSON-RPC api standard. The currently supported endpoints include: `eth_blockNumber` diff --git a/documentation/ipfs.md b/documentation/ipfs.md deleted file mode 100644 index 3997c1d1..00000000 --- a/documentation/ipfs.md +++ /dev/null @@ -1,53 +0,0 @@ -### PG-IPFS configuration - -This doc walks through the steps to install IPFS and configure it to use Postgres as its backing datastore. - -1. Start by downloading and moving into the IPFS repo: - -`go get github.com/ipfs/go-ipfs` - -`cd $GOPATH/src/github.com/ipfs/go-ipfs` - -2. 
Add the [Postgres-supporting fork](https://github.com/vulcanize/go-ipfs) and switch over to it: - -`git remote add vulcanize https://github.com/vulcanize/go-ipfs.git` - -`git fetch vulcanize` - -`git checkout -b postgres_update tags/v0.4.22-alpha` - -3. Now install this fork of ipfs, first be sure to remove any previous installation: - -`make install` - -4. Check that is installed properly by running: - -`ipfs` - -You should see the CLI info/help output. - -5. Now we initialize with the `postgresds` profile. -If ipfs was previously initialized we will need to remove the old profile first. -We also need to provide env variables for the postgres connection: - -We can either set these manually, e.g. -```bash -export IPFS_PGHOST= -export IPFS_PGUSER= -export IPFS_PGDATABASE= -export IPFS_PGPORT= -export IPFS_PGPASSWORD= -``` - -And then run the ipfs command: - -`ipfs init --profile=postgresds` - -Or we can use the pre-made script at `GOPATH/src/github.com/ipfs/go-ipfs/misc/utility/ipfs_postgres.sh` -which has usage: - -`./ipfs_postgres.sh "` - -and will ask us to enter the password, avoiding storing it to an ENV variable. - -Once we have initialized ipfs, that is all we need to do with it- we do not need to run a daemon during the subsequent processes. \ No newline at end of file diff --git a/environments/example.toml b/environments/example.toml new file mode 100644 index 00000000..67d319b2 --- /dev/null +++ b/environments/example.toml @@ -0,0 +1,14 @@ +[database] + name = "vulcanize_public" # $DATABASE_NAME + hostname = "localhost" # $DATABASE_HOSTNAME + port = 5432 # $DATABASE_PORT + user = "postgres" # $DATABASE_USER + password = "" # $DATABASE_PASSWORD + +[log] + level = "info" # $LOGRUS_LEVEL + +[server] + ipcPath = "~/.vulcanize/vulcanize.ipc" # $SERVER_IPC_PATH + wsPath = "127.0.0.1:8081" # $SERVER_WS_PATH + httpPath = "127.0.0.1:8082" # $SERVER_HTTP_PATH \ No newline at end of file diff --git a/environments/superNodeETH.toml b/environments/superNodeETH.toml deleted file mode 100644 index 78bf9be4..00000000 --- a/environments/superNodeETH.toml +++ /dev/null @@ -1,49 +0,0 @@ -[database] - name = "vulcanize_testing" # $DATABASE_NAME - hostname = "localhost" # $DATABASE_HOSTNAME - port = 5432 # $DATABASE_PORT - user = "postgres" # $DATABASE_USER - password = "" # $DATABASE_PASSWORD - - [database.sync] - maxIdle = 1 - [database.backFill] - maxIdle = 5 - -[log] - level = "debug" # $LOGRUS_LEVEL - -[resync] - chain = "ethereum" # $RESYNC_CHAIN - type = "state" # $RESYNC_TYPE - start = 0 # $RESYNC_START - stop = 0 # $RESYNC_STOP - batchSize = 5 # $RESYNC_BATCH_SIZE - batchNumber = 5 # $RESYNC_BATCH_NUMBER - timeout = 300 # $HTTP_TIMEOUT - clearOldCache = true # $RESYNC_CLEAR_OLD_CACHE - resetValidation = true # $RESYNC_RESET_VALIDATION - -[watcher] - chain = "ethereum" # $SUPERNODE_CHAIN - server = false # $SUPERNODE_SERVER - ipcPath = "~/.vulcanize/vulcanize.ipc" # $SUPERNODE_IPC_PATH - wsPath = "127.0.0.1:8081" # $SUPERNODE_WS_PATH - httpPath = "127.0.0.1:8082" # $SUPERNODE_HTTP_PATH - sync = true # $SUPERNODE_SYNC - workers = 1 # $SUPERNODE_WORKERS - backFill = false # $SUPERNODE_BACKFILL - frequency = 15 # $SUPERNODE_FREQUENCY - batchSize = 5 # $SUPERNODE_BATCH_SIZE - batchNumber = 5 # $SUPERNODE_BATCH_NUMBER - timeout = 300 # $HTTP_TIMEOUT - validationLevel = 1 # $SUPERNODE_VALIDATION_LEVEL - -[ethereum] - wsPath = "127.0.0.1:8546" # $ETH_WS_PATH - httpPath = "127.0.0.1:8545" # $ETH_HTTP_PATH - nodeID = "arch1" # $ETH_NODE_ID - clientName = "Geth" # $ETH_CLIENT_NAME - genesisBlock = 
"0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3" # $ETH_GENESIS_BLOCK - networkID = "1" # $ETH_NETWORK_ID - chainID = "1" # $ETH_CHAIN_ID diff --git a/environments/superNodeSubscription.toml b/environments/superNodeSubscription.toml deleted file mode 100644 index 14a82379..00000000 --- a/environments/superNodeSubscription.toml +++ /dev/null @@ -1,30 +0,0 @@ -[watcher] - [watcher.ethSubscription] - historicalData = false - historicalDataOnly = false - startingBlock = 0 - endingBlock = 0 - wsPath = "ws://127.0.0.1:8080" - [watcher.ethSubscription.headerFilter] - off = false - uncles = false - [watcher.ethSubscription.txFilter] - off = false - src = [] - dst = [] - [watcher.ethSubscription.receiptFilter] - off = false - contracts = [] - topic0s = [] - topic1s = [] - topic2s = [] - topic3s = [] - [watcher.ethSubscription.stateFilter] - off = false - addresses = [] - intermediateNodes = false - [watcher.ethSubscription.storageFilter] - off = true - addresses = [] - storageKeys = [] - intermediateNodes = false \ No newline at end of file diff --git a/go.mod b/go.mod index bf3fde28..19a272db 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module github.com/vulcanize/ipfs-blockchain-watcher +module github.com/vulcanize/ipld-eth-server go 1.13 diff --git a/go.sum b/go.sum index 4b72d9a9..9982aa34 100644 --- a/go.sum +++ b/go.sum @@ -936,6 +936,7 @@ github.com/vulcanize/go-ethereum v1.9.11-statediff-0.0.2 h1:ebv2bWocCmNKGnpHtRjS github.com/vulcanize/go-ethereum v1.9.11-statediff-0.0.2/go.mod h1:7oC0Ni6dosMv5pxMigm6s0hN8g4haJMBnqmmo0D9YfQ= github.com/vulcanize/go-ethereum v1.9.11-statediff-0.0.5 h1:U+BqhjRLR22e9OEm8cgWC3Eq3bh8G6azjNpXeenfCG4= github.com/vulcanize/go-ethereum v1.9.11-statediff-0.0.5/go.mod h1:7oC0Ni6dosMv5pxMigm6s0hN8g4haJMBnqmmo0D9YfQ= +github.com/vulcanize/ipfs-blockchain-watcher v0.0.9 h1:pKL378Wtuhi8HPw3ZqV/3UBgJngUw1Ke4w5GKVM52pY= github.com/vulcanize/pg-ipfs-ethdb v0.0.1-alpha h1:Y7j0Hw1jgVVOg+eUGUr7OgH+gOBID0DwbsfZV1KoL7I= github.com/vulcanize/pg-ipfs-ethdb v0.0.1-alpha/go.mod h1:OuqE4r2LGWAtDVx3s1yaAzDcwy+LEAqrWaE1L8UfrGY= github.com/wangjia184/sortedset v0.0.0-20160527075905-f5d03557ba30/go.mod h1:YkocrP2K2tcw938x9gCOmT5G5eCD6jsTz0SZuyAqwIE= diff --git a/pkg/builders/builders.go b/pkg/builders/builders.go deleted file mode 100644 index f40c526e..00000000 --- a/pkg/builders/builders.go +++ /dev/null @@ -1,168 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package builders - -import ( - "fmt" - "time" - - "github.com/btcsuite/btcd/chaincfg" - "github.com/btcsuite/btcd/rpcclient" - "github.com/ethereum/go-ethereum/rpc" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipld-eth-server/pkg/btc" - "github.com/vulcanize/ipld-eth-server/pkg/eth" - "github.com/vulcanize/ipld-eth-server/pkg/shared" -) - -// NewResponseFilterer constructs a ResponseFilterer for the provided chain type -func NewResponseFilterer(chain shared.ChainType) (shared.ResponseFilterer, error) { - switch chain { - case shared.Ethereum: - return eth.NewResponseFilterer(), nil - case shared.Bitcoin: - return btc.NewResponseFilterer(), nil - default: - return nil, fmt.Errorf("invalid chain %s for filterer constructor", chain.String()) - } -} - -// NewCIDRetriever constructs a CIDRetriever for the provided chain type -func NewCIDRetriever(chain shared.ChainType, db *postgres.DB) (shared.CIDRetriever, error) { - switch chain { - case shared.Ethereum: - return eth.NewCIDRetriever(db), nil - case shared.Bitcoin: - return btc.NewCIDRetriever(db), nil - default: - return nil, fmt.Errorf("invalid chain %s for retriever constructor", chain.String()) - } -} - -// NewPayloadStreamer constructs a PayloadStreamer for the provided chain type -func NewPayloadStreamer(chain shared.ChainType, clientOrConfig interface{}) (shared.PayloadStreamer, chan shared.RawChainData, error) { - switch chain { - case shared.Ethereum: - ethClient, ok := clientOrConfig.(*rpc.Client) - if !ok { - return nil, nil, fmt.Errorf("ethereum payload streamer constructor expected client type %T got %T", &rpc.Client{}, clientOrConfig) - } - streamChan := make(chan shared.RawChainData, eth.PayloadChanBufferSize) - return eth.NewPayloadStreamer(ethClient), streamChan, nil - case shared.Bitcoin: - btcClientConn, ok := clientOrConfig.(*rpcclient.ConnConfig) - if !ok { - return nil, nil, fmt.Errorf("bitcoin payload streamer constructor expected client config type %T got %T", rpcclient.ConnConfig{}, clientOrConfig) - } - streamChan := make(chan shared.RawChainData, btc.PayloadChanBufferSize) - return btc.NewHTTPPayloadStreamer(btcClientConn), streamChan, nil - default: - return nil, nil, fmt.Errorf("invalid chain %s for streamer constructor", chain.String()) - } -} - -// NewPaylaodFetcher constructs a PayloadFetcher for the provided chain type -func NewPaylaodFetcher(chain shared.ChainType, client interface{}, timeout time.Duration) (shared.PayloadFetcher, error) { - switch chain { - case shared.Ethereum: - batchClient, ok := client.(*rpc.Client) - if !ok { - return nil, fmt.Errorf("ethereum payload fetcher constructor expected client type %T got %T", &rpc.Client{}, client) - } - return eth.NewPayloadFetcher(batchClient, timeout), nil - case shared.Bitcoin: - connConfig, ok := client.(*rpcclient.ConnConfig) - if !ok { - return nil, fmt.Errorf("bitcoin payload fetcher constructor expected client type %T got %T", &rpcclient.Client{}, client) - } - return btc.NewPayloadFetcher(connConfig) - default: - return nil, fmt.Errorf("invalid chain %s for payload fetcher constructor", chain.String()) - } -} - -// NewPayloadConverter constructs a PayloadConverter for the provided chain type -func NewPayloadConverter(chainType shared.ChainType, chainID uint64) (shared.PayloadConverter, error) { - switch chainType { - case shared.Ethereum: - chainConfig, err := eth.ChainConfig(chainID) - if err != nil { - return nil, err - } - return eth.NewPayloadConverter(chainConfig), nil - case shared.Bitcoin: - 
return btc.NewPayloadConverter(&chaincfg.MainNetParams), nil - default: - return nil, fmt.Errorf("invalid chain %s for converter constructor", chainType.String()) - } -} - -// NewIPLDFetcher constructs an IPLDFetcher for the provided chain type -func NewIPLDFetcher(chain shared.ChainType, db *postgres.DB) (shared.IPLDFetcher, error) { - switch chain { - case shared.Ethereum: - return eth.NewIPLDFetcher(db), nil - case shared.Bitcoin: - return btc.NewIPLDFetcher(db), nil - default: - return nil, fmt.Errorf("invalid chain %s for IPLD fetcher constructor", chain.String()) - } -} - -// NewIPLDPublisher constructs an IPLDPublisher for the provided chain type -func NewIPLDPublisher(chain shared.ChainType, db *postgres.DB) (shared.IPLDPublisher, error) { - switch chain { - case shared.Ethereum: - return eth.NewIPLDPublisher(db), nil - case shared.Bitcoin: - return btc.NewIPLDPublisher(db), nil - default: - return nil, fmt.Errorf("invalid chain %s for publisher constructor", chain.String()) - } -} - -// NewPublicAPI constructs a PublicAPI for the provided chain type -func NewPublicAPI(chain shared.ChainType, db *postgres.DB) (rpc.API, error) { - switch chain { - case shared.Ethereum: - backend, err := eth.NewEthBackend(db) - if err != nil { - return rpc.API{}, err - } - return rpc.API{ - Namespace: eth.APIName, - Version: eth.APIVersion, - Service: eth.NewPublicEthAPI(backend), - Public: true, - }, nil - default: - return rpc.API{}, fmt.Errorf("invalid chain %s for public api constructor", chain.String()) - } -} - -// NewCleaner constructs a Cleaner for the provided chain type -func NewCleaner(chain shared.ChainType, db *postgres.DB) (shared.Cleaner, error) { - switch chain { - case shared.Ethereum: - return eth.NewCleaner(db), nil - case shared.Bitcoin: - return btc.NewCleaner(db), nil - default: - return nil, fmt.Errorf("invalid chain %s for cleaner constructor", chain.String()) - } -} diff --git a/pkg/eth/cid_retriever.go b/pkg/eth/cid_retriever.go index e62ecffe..3d83f62e 100644 --- a/pkg/eth/cid_retriever.go +++ b/pkg/eth/cid_retriever.go @@ -17,7 +17,6 @@ package eth import ( - "database/sql" "fmt" "math/big" @@ -31,7 +30,6 @@ import ( "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" "github.com/vulcanize/ipld-eth-server/pkg/shared" - "github.com/vulcanize/ipld-eth-server/utils" ) // Retriever interface for substituting mocks in tests @@ -446,56 +444,6 @@ func (ecr *CIDRetriever) RetrieveStorageCIDs(tx *sqlx.Tx, storageFilter StorageF return storageNodeCIDs, tx.Select(&storageNodeCIDs, pgStr, args...) 
} -// RetrieveGapsInData is used to find the the block numbers at which we are missing data in the db -// it finds the union of heights where no data exists and where the times_validated is lower than the validation level -func (ecr *CIDRetriever) RetrieveGapsInData(validationLevel int) ([]eth2.DBGap, error) { - log.Info("searching for gaps in the eth ipfs watcher database") - startingBlock, err := ecr.RetrieveFirstBlockNumber() - if err != nil { - return nil, fmt.Errorf("eth CIDRetriever RetrieveFirstBlockNumber error: %v", err) - } - var initialGap []eth2.DBGap - if startingBlock != 0 { - stop := uint64(startingBlock - 1) - log.Infof("found gap at the beginning of the eth sync from 0 to %d", stop) - initialGap = []eth2.DBGap{{ - Start: 0, - Stop: stop, - }} - } - - pgStr := `SELECT header_cids.block_number + 1 AS start, min(fr.block_number) - 1 AS stop FROM eth.header_cids - LEFT JOIN eth.header_cids r on eth.header_cids.block_number = r.block_number - 1 - LEFT JOIN eth.header_cids fr on eth.header_cids.block_number < fr.block_number - WHERE r.block_number is NULL and fr.block_number IS NOT NULL - GROUP BY header_cids.block_number, r.block_number` - results := make([]struct { - Start uint64 `db:"start"` - Stop uint64 `db:"stop"` - }, 0) - if err := ecr.db.Select(&results, pgStr); err != nil && err != sql.ErrNoRows { - return nil, err - } - emptyGaps := make([]eth2.DBGap, len(results)) - for i, res := range results { - emptyGaps[i] = eth2.DBGap{ - Start: res.Start, - Stop: res.Stop, - } - } - - // Find sections of blocks where we are below the validation level - // There will be no overlap between these "gaps" and the ones above - pgStr = `SELECT block_number FROM eth.header_cids - WHERE times_validated < $1 - ORDER BY block_number` - var heights []uint64 - if err := ecr.db.Select(&heights, pgStr, validationLevel); err != nil && err != sql.ErrNoRows { - return nil, err - } - return append(append(initialGap, emptyGaps...), utils.MissingHeightsToGaps(heights)...), nil -} - // RetrieveBlockByHash returns all of the CIDs needed to compose an entire block, for a given block hash func (ecr *CIDRetriever) RetrieveBlockByHash(blockHash common.Hash) (eth2.HeaderModel, []eth2.UncleModel, []eth2.TxModel, []eth2.ReceiptModel, error) { log.Debug("retrieving block cids for block hash ", blockHash.String()) diff --git a/pkg/eth/mocks/batch_client.go b/pkg/eth/mocks/batch_client.go deleted file mode 100644 index a4b02729..00000000 --- a/pkg/eth/mocks/batch_client.go +++ /dev/null @@ -1,86 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "context" - "encoding/json" - "errors" - - "github.com/ethereum/go-ethereum/rpc" - "github.com/ethereum/go-ethereum/statediff" -) - -// BackFillerClient is a mock client for use in backfiller tests -type BackFillerClient struct { - MappedStateDiffAt map[uint64][]byte -} - -// SetReturnDiffAt method to set what statediffs the mock client returns -func (mc *BackFillerClient) SetReturnDiffAt(height uint64, diffPayload statediff.Payload) error { - if mc.MappedStateDiffAt == nil { - mc.MappedStateDiffAt = make(map[uint64][]byte) - } - by, err := json.Marshal(diffPayload) - if err != nil { - return err - } - mc.MappedStateDiffAt[height] = by - return nil -} - -// BatchCall mockClient method to simulate batch call to geth -func (mc *BackFillerClient) BatchCall(batch []rpc.BatchElem) error { - if mc.MappedStateDiffAt == nil { - return errors.New("mockclient needs to be initialized with statediff payloads and errors") - } - for _, batchElem := range batch { - if len(batchElem.Args) < 1 { - return errors.New("expected batch elem to contain an argument(s)") - } - blockHeight, ok := batchElem.Args[0].(uint64) - if !ok { - return errors.New("expected first batch elem argument to be a uint64") - } - err := json.Unmarshal(mc.MappedStateDiffAt[blockHeight], batchElem.Result) - if err != nil { - return err - } - } - return nil -} - -// BatchCallContext mockClient method to simulate batch call to geth -func (mc *BackFillerClient) BatchCallContext(ctx context.Context, batch []rpc.BatchElem) error { - if mc.MappedStateDiffAt == nil { - return errors.New("mockclient needs to be initialized with statediff payloads and errors") - } - for _, batchElem := range batch { - if len(batchElem.Args) < 1 { - return errors.New("expected batch elem to contain an argument(s)") - } - blockHeight, ok := batchElem.Args[0].(uint64) - if !ok { - return errors.New("expected batch elem first argument to be a uint64") - } - err := json.Unmarshal(mc.MappedStateDiffAt[blockHeight], batchElem.Result) - if err != nil { - return err - } - } - return nil -} diff --git a/pkg/eth/mocks/converter.go b/pkg/eth/mocks/converter.go deleted file mode 100644 index 50158a4e..00000000 --- a/pkg/eth/mocks/converter.go +++ /dev/null @@ -1,66 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "fmt" - - "github.com/ethereum/go-ethereum/statediff" - - "github.com/vulcanize/ipld-eth-server/pkg/eth" - "github.com/vulcanize/ipld-eth-server/pkg/shared" -) - -// PayloadConverter is the underlying struct for the Converter interface -type PayloadConverter struct { - PassedStatediffPayload statediff.Payload - ReturnIPLDPayload eth.ConvertedPayload - ReturnErr error -} - -// Convert method is used to convert a geth statediff.Payload to a IPLDPayload -func (pc *PayloadConverter) Convert(payload shared.RawChainData) (shared.ConvertedData, error) { - stateDiffPayload, ok := payload.(statediff.Payload) - if !ok { - return nil, fmt.Errorf("convert expected payload type %T got %T", statediff.Payload{}, payload) - } - pc.PassedStatediffPayload = stateDiffPayload - return pc.ReturnIPLDPayload, pc.ReturnErr -} - -// IterativePayloadConverter is the underlying struct for the Converter interface -type IterativePayloadConverter struct { - PassedStatediffPayload []statediff.Payload - ReturnIPLDPayload []eth.ConvertedPayload - ReturnErr error - iteration int -} - -// Convert method is used to convert a geth statediff.Payload to a IPLDPayload -func (pc *IterativePayloadConverter) Convert(payload shared.RawChainData) (shared.ConvertedData, error) { - stateDiffPayload, ok := payload.(statediff.Payload) - if !ok { - return nil, fmt.Errorf("convert expected payload type %T got %T", statediff.Payload{}, payload) - } - pc.PassedStatediffPayload = append(pc.PassedStatediffPayload, stateDiffPayload) - if len(pc.ReturnIPLDPayload) < pc.iteration+1 { - return nil, fmt.Errorf("IterativePayloadConverter does not have a payload to return at iteration %d", pc.iteration) - } - returnPayload := pc.ReturnIPLDPayload[pc.iteration] - pc.iteration++ - return returnPayload, pc.ReturnErr -} diff --git a/pkg/eth/mocks/indexer.go b/pkg/eth/mocks/indexer.go deleted file mode 100644 index cee84767..00000000 --- a/pkg/eth/mocks/indexer.go +++ /dev/null @@ -1,41 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "fmt" - - "github.com/vulcanize/ipld-eth-server/pkg/shared" - - "github.com/vulcanize/ipld-eth-server/pkg/eth" -) - -// CIDIndexer is the underlying struct for the Indexer interface -type CIDIndexer struct { - PassedCIDPayload []*eth.CIDPayload - ReturnErr error -} - -// Index indexes a cidPayload in Postgres -func (repo *CIDIndexer) Index(cids shared.CIDsForIndexing) error { - cidPayload, ok := cids.(*eth.CIDPayload) - if !ok { - return fmt.Errorf("index expected cids type %T got %T", ð.CIDPayload{}, cids) - } - repo.PassedCIDPayload = append(repo.PassedCIDPayload, cidPayload) - return repo.ReturnErr -} diff --git a/pkg/eth/mocks/publisher.go b/pkg/eth/mocks/publisher.go deleted file mode 100644 index 5758b277..00000000 --- a/pkg/eth/mocks/publisher.go +++ /dev/null @@ -1,61 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package mocks - -import ( - "fmt" - - "github.com/vulcanize/ipld-eth-server/pkg/shared" - - "github.com/vulcanize/ipld-eth-server/pkg/eth" -) - -// IPLDPublisher is the underlying struct for the Publisher interface -type IPLDPublisher struct { - PassedIPLDPayload eth.ConvertedPayload - ReturnCIDPayload *eth.CIDPayload - ReturnErr error -} - -// Publish publishes an IPLDPayload to IPFS and returns the corresponding CIDPayload -func (pub *IPLDPublisher) Publish(payload shared.ConvertedData) error { - ipldPayload, ok := payload.(eth.ConvertedPayload) - if !ok { - return fmt.Errorf("publish expected payload type %T got %T", ð.ConvertedPayload{}, payload) - } - pub.PassedIPLDPayload = ipldPayload - return pub.ReturnErr -} - -// IterativeIPLDPublisher is the underlying struct for the Publisher interface; used in testing -type IterativeIPLDPublisher struct { - PassedIPLDPayload []eth.ConvertedPayload - ReturnCIDPayload []*eth.CIDPayload - ReturnErr error - iteration int -} - -// Publish publishes an IPLDPayload to IPFS and returns the corresponding CIDPayload -func (pub *IterativeIPLDPublisher) Publish(payload shared.ConvertedData) error { - ipldPayload, ok := payload.(eth.ConvertedPayload) - if !ok { - return fmt.Errorf("publish expected payload type %T got %T", ð.ConvertedPayload{}, payload) - } - pub.PassedIPLDPayload = append(pub.PassedIPLDPayload, ipldPayload) - pub.iteration++ - return pub.ReturnErr -} diff --git a/pkg/eth/mocks/stream_client.go b/pkg/eth/mocks/stream_client.go deleted file mode 100644 index 6ae821b4..00000000 --- a/pkg/eth/mocks/stream_client.go +++ /dev/null @@ -1,44 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package mocks - -import ( - "context" - - "github.com/ethereum/go-ethereum/rpc" -) - -type StreamClient struct { - passedContext context.Context - passedResult interface{} - passedNamespace string - passedPayloadChan interface{} - passedSubscribeArgs []interface{} -} - -func (client *StreamClient) Subscribe(ctx context.Context, namespace string, payloadChan interface{}, args ...interface{}) (*rpc.ClientSubscription, error) { - client.passedNamespace = namespace - client.passedPayloadChan = payloadChan - client.passedContext = ctx - - for _, arg := range args { - client.passedSubscribeArgs = append(client.passedSubscribeArgs, arg) - } - - subscription := rpc.ClientSubscription{} - return &subscription, nil -} diff --git a/pkg/serve/config.go b/pkg/serve/config.go index 5a525c30..0a2a0197 100644 --- a/pkg/serve/config.go +++ b/pkg/serve/config.go @@ -46,7 +46,6 @@ type Config struct { WSEndpoint string HTTPEndpoint string IPCEndpoint string - NodeInfo node.Info } // NewConfig is used to initialize a watcher config from a .toml file @@ -80,19 +79,12 @@ func NewConfig() (*Config, error) { } c.HTTPEndpoint = httpPath overrideDBConnConfig(&c.DBConfig) - serveDB := utils.LoadPostgres(c.DBConfig, c.NodeInfo) + serveDB := utils.LoadPostgres(c.DBConfig, postgres.Info{}) c.DB = &serveDB return c, nil } -type mode string - -var ( - Sync mode = "sync" - Serve mode = "serve" -) - func overrideDBConnConfig(con *postgres.Config) { viper.BindEnv("database.server.maxIdle", SERVER_MAX_IDLE_CONNECTIONS) viper.BindEnv("database.server.maxOpen", SERVER_MAX_OPEN_CONNECTIONS) diff --git a/pkg/serve/service.go b/pkg/serve/service.go index adaf027e..3226d6e2 100644 --- a/pkg/serve/service.go +++ b/pkg/serve/service.go @@ -52,8 +52,6 @@ type Server interface { Subscribe(id rpc.ID, sub chan<- SubscriptionPayload, quitChan chan<- bool, params eth.SubscriptionSettings) // Method to unsubscribe from the service Unsubscribe(id rpc.ID) - // Method to access the node info for the service - Node() *node.Info // Method to access chain type Chain() shared.ChainType } @@ -74,8 +72,6 @@ type Service struct { Subscriptions map[common.Hash]map[rpc.ID]Subscription // A mapping of subscription params hash to the corresponding subscription params SubscriptionTypes map[common.Hash]eth.SubscriptionSettings - // Info for the Geth node that this watcher is working with - NodeInfo *node.Info // Underlying db db *postgres.DB // wg for syncing serve processes @@ -355,11 +351,6 @@ func (sap *Service) Stop() error { return nil } -// Node returns the node info for this service -func (sap *Service) Node() *node.Info { - return sap.NodeInfo -} - // Chain returns the chain type for this service func (sap *Service) Chain() shared.ChainType { return shared.Ethereum diff --git a/pkg/shared/mocks/payload_fetcher.go b/pkg/shared/mocks/payload_fetcher.go deleted file mode 100644 index 218cd923..00000000 --- a/pkg/shared/mocks/payload_fetcher.go +++ /dev/null @@ -1,50 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by 
-// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package mocks - -import ( - "errors" - "sync/atomic" - - "github.com/vulcanize/ipld-eth-server/pkg/shared" -) - -// PayloadFetcher mock for tests -type PayloadFetcher struct { - PayloadsToReturn map[uint64]shared.RawChainData - FetchErrs map[uint64]error - CalledAtBlockHeights [][]uint64 - CalledTimes int64 -} - -// FetchAt mock method -func (fetcher *PayloadFetcher) FetchAt(blockHeights []uint64) ([]shared.RawChainData, error) { - if fetcher.PayloadsToReturn == nil { - return nil, errors.New("mock StateDiffFetcher needs to be initialized with payloads to return") - } - atomic.AddInt64(&fetcher.CalledTimes, 1) // thread-safe increment - fetcher.CalledAtBlockHeights = append(fetcher.CalledAtBlockHeights, blockHeights) - results := make([]shared.RawChainData, 0, len(blockHeights)) - for _, height := range blockHeights { - results = append(results, fetcher.PayloadsToReturn[height]) - err, ok := fetcher.FetchErrs[height] - if ok && err != nil { - return nil, err - } - } - return results, nil -} diff --git a/pkg/shared/mocks/retriever.go b/pkg/shared/mocks/retriever.go deleted file mode 100644 index ba3843f9..00000000 --- a/pkg/shared/mocks/retriever.go +++ /dev/null @@ -1,64 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipld-eth-server/pkg/shared" -) - -// CIDRetriever is a mock CID retriever for use in tests -type CIDRetriever struct { - GapsToRetrieve []shared.Gap - GapsToRetrieveErr error - CalledTimes int - FirstBlockNumberToReturn int64 - RetrieveFirstBlockNumberErr error -} - -// RetrieveCIDs mock method -func (*CIDRetriever) Retrieve(filter shared.SubscriptionSettings, blockNumber int64) ([]shared.CIDsForFetching, bool, error) { - panic("implement me") -} - -// RetrieveLastBlockNumber mock method -func (*CIDRetriever) RetrieveLastBlockNumber() (int64, error) { - panic("implement me") -} - -// RetrieveFirstBlockNumber mock method -func (mcr *CIDRetriever) RetrieveFirstBlockNumber() (int64, error) { - return mcr.FirstBlockNumberToReturn, mcr.RetrieveFirstBlockNumberErr -} - -// RetrieveGapsInData mock method -func (mcr *CIDRetriever) RetrieveGapsInData(int) ([]shared.Gap, error) { - mcr.CalledTimes++ - return mcr.GapsToRetrieve, mcr.GapsToRetrieveErr -} - -// SetGapsToRetrieve mock method -func (mcr *CIDRetriever) SetGapsToRetrieve(gaps []shared.Gap) { - if mcr.GapsToRetrieve == nil { - mcr.GapsToRetrieve = make([]shared.Gap, 0) - } - mcr.GapsToRetrieve = append(mcr.GapsToRetrieve, gaps...) -} - -func (mcr *CIDRetriever) Database() *postgres.DB { - panic("implement me") -} diff --git a/pkg/shared/mocks/streamer.go b/pkg/shared/mocks/streamer.go deleted file mode 100644 index daf683eb..00000000 --- a/pkg/shared/mocks/streamer.go +++ /dev/null @@ -1,43 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "github.com/ethereum/go-ethereum/rpc" - "github.com/vulcanize/ipld-eth-server/pkg/shared" -) - -// PayloadStreamer mock struct -type PayloadStreamer struct { - PassedPayloadChan chan shared.RawChainData - ReturnSub *rpc.ClientSubscription - ReturnErr error - StreamPayloads []shared.RawChainData -} - -// Stream mock method -func (sds *PayloadStreamer) Stream(payloadChan chan shared.RawChainData) (shared.ClientSubscription, error) { - sds.PassedPayloadChan = payloadChan - - go func() { - for _, payload := range sds.StreamPayloads { - sds.PassedPayloadChan <- payload - } - }() - - return sds.ReturnSub, sds.ReturnErr -} diff --git a/test_config/test_config.go b/test_config/test_config.go index 7236422e..88678dd9 100644 --- a/test_config/test_config.go +++ b/test_config/test_config.go @@ -22,10 +22,10 @@ import ( "github.com/sirupsen/logrus" "github.com/spf13/viper" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/config" + "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" ) -var DBConfig config.Database +var DBConfig postgres.Config func init() { setTestConfig() @@ -53,7 +53,7 @@ func setTestConfig() { port := vip.GetInt("database.port") name := vip.GetString("database.name") - DBConfig = config.Database{ + DBConfig = postgres.Config{ Hostname: hn, Name: name, Port: port, diff --git a/utils/utilities_suite_test.go b/utils/utilities_suite_test.go deleted file mode 100644 index 5095a048..00000000 --- a/utils/utilities_suite_test.go +++ /dev/null @@ -1,36 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package utils_test - -import ( - "io/ioutil" - "testing" - - "github.com/sirupsen/logrus" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" -) - -func TestShared(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Shared Utilities Suite") -} - -var _ = BeforeSuite(func() { - logrus.SetOutput(ioutil.Discard) -}) diff --git a/utils/utils.go b/utils/utils.go deleted file mode 100644 index 56771295..00000000 --- a/utils/utils.go +++ /dev/null @@ -1,89 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package utils - -import ( - "errors" - - "github.com/sirupsen/logrus" - - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/config" - "github.com/vulcanize/ipld-eth-server/pkg/node" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" - "github.com/vulcanize/ipld-eth-server/pkg/shared" -) - -func LoadPostgres(database config.Database, node node.Node) postgres.DB { - db, err := postgres.NewDB(database, node) - if err != nil { - logrus.Fatal("Error loading postgres: ", err) - } - return *db -} - -// GetBlockHeightBins splits a block range up into bins of block heights of the given batch size -func GetBlockHeightBins(startingBlock, endingBlock, batchSize uint64) ([][]uint64, error) { - if endingBlock < startingBlock { - return nil, errors.New("backfill: ending block number needs to be greater than starting block number") - } - if batchSize == 0 { - return nil, errors.New("backfill: batchsize needs to be greater than zero") - } - length := endingBlock - startingBlock + 1 - numberOfBins := length / batchSize - if length%batchSize != 0 { - numberOfBins++ - } - blockRangeBins := make([][]uint64, numberOfBins) - for i := range blockRangeBins { - nextBinStart := startingBlock + batchSize - blockRange := make([]uint64, 0, nextBinStart-startingBlock+1) - for j := startingBlock; j < nextBinStart && j <= endingBlock; j++ { - blockRange = append(blockRange, j) - } - startingBlock = nextBinStart - blockRangeBins[i] = blockRange - } - return blockRangeBins, nil -} - -// MissingHeightsToGaps returns a slice of gaps from a slice of missing block heights -func MissingHeightsToGaps(heights []uint64) []shared.Gap { - if len(heights) == 0 { - return nil - } - validationGaps := make([]shared.Gap, 0) - start := heights[0] - lastHeight := start - for i, height := range heights[1:] { - if height != lastHeight+1 { - validationGaps = append(validationGaps, shared.Gap{ - Start: start, - Stop: lastHeight, - }) - start = height - } - if i+2 == len(heights) { - validationGaps = append(validationGaps, shared.Gap{ - Start: start, - Stop: height, - }) - } - lastHeight = height - } - return validationGaps -} diff --git a/utils/utils_test.go b/utils/utils_test.go deleted file mode 100644 index b36ac66c..00000000 --- a/utils/utils_test.go +++ /dev/null @@ -1,74 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package utils_test - -import ( - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/ipld-eth-server/utils" -) - -var _ = Describe("GetBlockHeightBins", func() { - It("splits a block range up into bins", func() { - var startingBlock uint64 = 1 - var endingBlock uint64 = 10101 - var batchSize uint64 = 100 - blockRangeBins, err := utils.GetBlockHeightBins(startingBlock, endingBlock, batchSize) - Expect(err).ToNot(HaveOccurred()) - Expect(len(blockRangeBins)).To(Equal(102)) - Expect(blockRangeBins[101]).To(Equal([]uint64{10101})) - - startingBlock = 101 - endingBlock = 10100 - batchSize = 100 - lastBin := make([]uint64, 0) - for i := 10001; i <= 10100; i++ { - lastBin = append(lastBin, uint64(i)) - } - blockRangeBins, err = utils.GetBlockHeightBins(startingBlock, endingBlock, batchSize) - Expect(err).ToNot(HaveOccurred()) - Expect(len(blockRangeBins)).To(Equal(100)) - Expect(blockRangeBins[99]).To(Equal(lastBin)) - - startingBlock = 1 - endingBlock = 1 - batchSize = 100 - blockRangeBins, err = utils.GetBlockHeightBins(startingBlock, endingBlock, batchSize) - Expect(err).ToNot(HaveOccurred()) - Expect(len(blockRangeBins)).To(Equal(1)) - Expect(blockRangeBins[0]).To(Equal([]uint64{1})) - }) - - It("throws an error if the starting block is higher than the ending block", func() { - var startingBlock uint64 = 10102 - var endingBlock uint64 = 10101 - var batchSize uint64 = 100 - _, err := utils.GetBlockHeightBins(startingBlock, endingBlock, batchSize) - Expect(err).To(HaveOccurred()) - Expect(err.Error()).To(ContainSubstring("ending block number needs to be greater than starting block number")) - }) - - It("throws an error if the batch size is zero", func() { - var startingBlock uint64 = 1 - var endingBlock uint64 = 10101 - var batchSize uint64 = 0 - _, err := utils.GetBlockHeightBins(startingBlock, endingBlock, batchSize) - Expect(err).To(HaveOccurred()) - Expect(err.Error()).To(ContainSubstring("batchsize needs to be greater than zero")) - }) -}) From 1dee766014538907f6f8df5254bb4333346c7932 Mon Sep 17 00:00:00 2001 From: Ian Norden Date: Tue, 1 Sep 2020 16:14:37 -0500 Subject: [PATCH 07/12] update readme --- README.md | 188 ++++++----------------------- environments/subscribeExample.toml | 30 +++++ 2 files changed, 64 insertions(+), 154 deletions(-) create mode 100644 environments/subscribeExample.toml diff --git a/README.md b/README.md index 607308b2..e1782ca4 100644 --- a/README.md +++ b/README.md @@ -2,24 +2,23 @@ [![Go Report Card](https://goreportcard.com/badge/github.com/vulcanize/ipld-eth-server)](https://goreportcard.com/report/github.com/vulcanize/ipld-eth-server) -> ipld-eth-server is used to extract, transform, and load all eth or btc data into an IPFS-backing Postgres datastore while generating useful secondary indexes around the data in other Postgres tables +> ipld-eth-server is the server backend for indexed ETH IPLD objects ## Table of Contents 1. [Background](#background) -1. [Architecture](#architecture) 1. [Install](#install) 1. [Usage](#usage) 1. [Contributing](#contributing) 1. [License](#license) ## Background -ipld-eth-server is a collection of interfaces that are used to extract, process, store, and index -all blockchain data in Postgres-IPFS. The raw data indexed by ipld-eth-server serves as the basis for more specific watchers and applications. +NOTE: WIP -Currently the service supports complete processing of all Bitcoin and Ethereum data. +ipld-eth-server is used to service queries against the indexed Ethereum IPLD objects indexed by [ipld-eth-indexer](https://github.com/vulcanize/ipld-eth-indexer). 
+ +It exposes standard Ethereum JSON RPC endpoints on top of the database, in some cases these endpoints can leverage the unique indexes to improve query performance. +Additional, unique endpoints are exposed which utilize the new indexes and state diff data objects. -## Architecture -More details on the design of ipld-eth-server can be found in [here](./documentation/architecture.md) ## Dependencies Minimal build dependencies @@ -28,111 +27,10 @@ Minimal build dependencies * GCC compiler * This repository -Potential external dependencies -* Goose -* Postgres -* Statediffing go-ethereum -* Bitcoin node +External dependency +* Postgres database populated by [ipld-eth-indexer](https://github.com/vulcanize/ipld-eth-indexer) ## Install -1. [Goose](#goose) -1. [Postgres](#postgres) -1. [IPFS](#ipfs) -1. [Blockchain](#blockchain) -1. [Watcher](#watcher) - -### Goose -[goose](https://github.com/pressly/goose) is used for migration management. While it is not necessary to use `goose` for manual setup, it -is required for running the automated tests and is used by the `make migrate` command. - -### Postgres -1. [Install Postgres](https://wiki.postgresql.org/wiki/Detailed_installation_guides) -1. Create a superuser for yourself and make sure `psql --list` works without prompting for a password. -1. `createdb vulcanize_public` -1. `cd $GOPATH/src/github.com/vulcanize/ipld-eth-server` -1. Run the migrations: `make migrate HOST_NAME=localhost NAME=vulcanize_public PORT=5432` - - There are optional vars `USER=username:password` if the database user is not the default user `postgres` and/or a password is present - - To rollback a single step: `make rollback NAME=vulcanize_public` - - To rollback to a certain migration: `make rollback_to MIGRATION=n NAME=vulcanize_public` - - To see status of migrations: `make migration_status NAME=vulcanize_public` - - * See below for configuring additional environments - -In some cases (such as recent Ubuntu systems), it may be necessary to overcome failures of password authentication from -localhost. To allow access on Ubuntu, set localhost connections via hostname, ipv4, and ipv6 from peer/md5 to trust in: /etc/postgresql//pg_hba.conf - -(It should be noted that trusted auth should only be enabled on systems without sensitive data in them: development and local test databases) - -### IPFS -Data is stored in an [IPFS-backing Postgres datastore](https://github.com/ipfs/go-ds-sql). -By default data is written directly to the ipfs blockstore in Postgres; the public.blocks table. -In this case no further IPFS configuration is needed at this time. - -Optionally, ipld-eth-server can be configured to function through an internal ipfs node interface using the flag: `-ipfs-mode=interface`. -Operating through the ipfs interface provides the option to configure a block exchange that can search remotely for IPLD data found missing in the local datastore. -This option is irrelevant in most cases and this mode has some disadvantages, namely: - -1. Environment must have IPFS configured -1. Process will contend with the lockfile at `$IPFS_PATH` -1. 
Publishing and indexing of data must occur in separate db transactions - -More information for configuring Postgres-IPFS can be found [here](./documentation/ipfs.md) - -### Blockchain -This section describes how to setup an Ethereum or Bitcoin node to serve as a data source for ipld-eth-server - -#### Ethereum -For Ethereum, [a special fork of go-ethereum](https://github.com/vulcanize/go-ethereum/tree/statediff_at_anyblock-1.9.11) is currently *requirde*. -This can be setup as follows. -Skip this step if you already have access to a node that displays the statediffing endpoints. - -Begin by downloading geth and switching to the statediffing branch: - -`GO111MODULE=off go get -d github.com/ethereum/go-ethereum` - -`cd $GOPATH/src/github.com/ethereum/go-ethereum` - -`git remote add vulcanize https://github.com/vulcanize/go-ethereum.git` - -`git fetch vulcanize` - -`git checkout -b statediffing vulcanize/statediff_at_anyblock-1.9.11` - -Now, install this fork of geth (make sure any old versions have been uninstalled/binaries removed first): - -`make geth` - -And run the output binary with statediffing turned on: - -`cd $GOPATH/src/github.com/ethereum/go-ethereum/build/bin` - -`./geth --syncmode=full --statediff --ws` - -Note: to access historical data (perform `backFill`) the node will need to operate as an archival node (`--gcmode=archive`) with rpc endpoints -exposed (`--rpc --rpcapi=eth,statediff,net`) - -Warning: There is a good chance even a fully synced archive node has incomplete historical state data to some degree - -The output from geth should mention that it is `Starting statediff service` and block synchronization should begin shortly thereafter. -Note that until it receives a subscriber, the statediffing process does nothing but wait for one. Once a subscription is received, this -will be indicated in the output and the node will begin processing and sending statediffs. - -Also in the output will be the endpoints that will be used to interface with the node. -The default ws url is "127.0.0.1:8546" and the default http url is "127.0.0.1:8545". -These values will be used as the `ethereum.wsPath` and `ethereum.httpPath` in the config, respectively. - -#### Bitcoin -For Bitcoin, ipld-eth-server is able to operate entirely through the universally exposed JSON-RPC interfaces. -This means any of the standard full nodes can be used (e.g. bitcoind, btcd) as the data source. - -Point at a remote node or set one up locally using the instructions for [bitcoind](https://github.com/bitcoin/bitcoin) and [btcd](https://github.com/btcsuite/btcd). - -The default http url is "127.0.0.1:8332". We will use the http endpoint as both the `bitcoin.wsPath` and `bitcoin.httpPath` -(bitcoind does not support websocket endpoints, the watcher currently uses a "subscription" wrapper around the http endpoints) - -### Watcher -Finally, setup the watcher process itself. - Start by downloading ipld-eth-server and moving into the repo: `GO111MODULE=off go get -d github.com/vulcanize/ipld-eth-server` @@ -146,68 +44,50 @@ Then, build the binary: ## Usage After building the binary, run as -`./ipld-eth-server watch --config=` +`./ipld-eth-server serve --config=` ### Configuration -Below is the set of universal config parameters for the ipld-eth-server command, in .toml form, with the respective environmental variables commented to the side. -This set of parameters needs to be set no matter the chain type. 
+Below is the set of parameters for the ipld-eth-server command, in .toml form, with the respective environmental variables commented to the side. +The corresponding CLI flags can be found with the `./ipld-eth-server serve --help` command. ```toml [database] name = "vulcanize_public" # $DATABASE_NAME hostname = "localhost" # $DATABASE_HOSTNAME port = 5432 # $DATABASE_PORT - user = "vdbm" # $DATABASE_USER + user = "postgres" # $DATABASE_USER password = "" # $DATABASE_PASSWORD -[watcher] - chain = "bitcoin" # $SUPERNODE_CHAIN - server = true # $SUPERNODE_SERVER - ipcPath = "~/.vulcanize/vulcanize.ipc" # $SUPERNODE_IPC_PATH - wsPath = "127.0.0.1:8082" # $SUPERNODE_WS_PATH - httpPath = "127.0.0.1:8083" # $SUPERNODE_HTTP_PATH - sync = true # $SUPERNODE_SYNC - workers = 1 # $SUPERNODE_WORKERS - backFill = true # $SUPERNODE_BACKFILL - frequency = 45 # $SUPERNODE_FREQUENCY - batchSize = 1 # $SUPERNODE_BATCH_SIZE - batchNumber = 50 # $SUPERNODE_BATCH_NUMBER - timeout = 300 # $HTTP_TIMEOUT - validationLevel = 1 # $SUPERNODE_VALIDATION_LEVEL +[log] + level = "info" # $LOGRUS_LEVEL + +[server] + ipcPath = "~/.vulcanize/vulcanize.ipc" # $SERVER_IPC_PATH + wsPath = "127.0.0.1:8081" # $SERVER_WS_PATH + httpPath = "127.0.0.1:8082" # $SERVER_HTTP_PATH ``` -Additional parameters need to be set depending on the specific chain. +The `database` fields are for connecting to a Postgres database that has been/is being populated by [ipld-eth-indexer](https://github.com/vulcanize/ipld-eth-indexer). +The `server` fields set the paths for exposing the ipld-eth-server endpoints -For Bitcoin: -```toml -[bitcoin] - wsPath = "127.0.0.1:8332" # $BTC_WS_PATH - httpPath = "127.0.0.1:8332" # $BTC_HTTP_PATH - pass = "password" # $BTC_NODE_PASSWORD - user = "username" # $BTC_NODE_USER - nodeID = "ocd0" # $BTC_NODE_ID - clientName = "Omnicore" # $BTC_CLIENT_NAME - genesisBlock = "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f" # $BTC_GENESIS_BLOCK - networkID = "0xD9B4BEF9" # $BTC_NETWORK_ID -``` +### Endpoints +#### IPLD subscription +TODO: Port the IPLD RPC subscription endpoints after the decoupling -For Ethereum: +#### Ethereum JSON-RPC +ipld-eth-server currently recapitulates portions of the Ethereum JSON-RPC api standard. -```toml -[ethereum] - wsPath = "127.0.0.1:8546" # $ETH_WS_PATH - httpPath = "127.0.0.1:8545" # $ETH_HTTP_PATH - nodeID = "arch1" # $ETH_NODE_ID - clientName = "Geth" # $ETH_CLIENT_NAME - genesisBlock = "0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3" # $ETH_GENESIS_BLOCK - networkID = "1" # $ETH_NETWORK_ID - chainID = "1" # $ETH_CHAIN_ID -``` +The currently supported standard endpoints are: +`eth_blockNumber` +`eth_getLogs` +`eth_getHeaderByNumber` +`eth_getBlockByNumber` +`eth_getBlockByHash` +`eth_getTransactionByHash` -### Exposing the data -A number of different APIs for remote access to ipld-eth-server data can be exposed, these are discussed in more detail [here](./documentation/apis.md) +TODO: Add the rest of the standard endpoints add unique endpoints (e.g. 
getSlice) ### Testing `make test` will run the unit tests diff --git a/environments/subscribeExample.toml b/environments/subscribeExample.toml new file mode 100644 index 00000000..14a82379 --- /dev/null +++ b/environments/subscribeExample.toml @@ -0,0 +1,30 @@ +[watcher] + [watcher.ethSubscription] + historicalData = false + historicalDataOnly = false + startingBlock = 0 + endingBlock = 0 + wsPath = "ws://127.0.0.1:8080" + [watcher.ethSubscription.headerFilter] + off = false + uncles = false + [watcher.ethSubscription.txFilter] + off = false + src = [] + dst = [] + [watcher.ethSubscription.receiptFilter] + off = false + contracts = [] + topic0s = [] + topic1s = [] + topic2s = [] + topic3s = [] + [watcher.ethSubscription.stateFilter] + off = false + addresses = [] + intermediateNodes = false + [watcher.ethSubscription.storageFilter] + off = true + addresses = [] + storageKeys = [] + intermediateNodes = false \ No newline at end of file From 97587705796bf87d5cf1572e0521b6910e7418cc Mon Sep 17 00:00:00 2001 From: Ian Norden Date: Wed, 2 Sep 2020 10:12:47 -0500 Subject: [PATCH 08/12] update migrations and schema --- .../00008_create_eth_state_cids_table.sql | 2 +- .../00010_create_eth_state_accouts_table.sql | 2 +- .../00011_create_postgraphile_comments.sql | 3 - db/schema.sql | 317 +----------------- 4 files changed, 4 insertions(+), 320 deletions(-) diff --git a/db/migrations/00008_create_eth_state_cids_table.sql b/db/migrations/00008_create_eth_state_cids_table.sql index e0bf6e57..fb2e291f 100644 --- a/db/migrations/00008_create_eth_state_cids_table.sql +++ b/db/migrations/00008_create_eth_state_cids_table.sql @@ -6,7 +6,7 @@ CREATE TABLE eth.state_cids ( cid TEXT NOT NULL, mh_key TEXT NOT NULL REFERENCES public.blocks (key) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, state_path BYTEA, - node_type INTEGER, + node_type INTEGER NOT NULL, diff BOOLEAN NOT NULL DEFAULT FALSE, UNIQUE (header_id, state_path) ); diff --git a/db/migrations/00010_create_eth_state_accouts_table.sql b/db/migrations/00010_create_eth_state_accouts_table.sql index 322d948f..d88111df 100644 --- a/db/migrations/00010_create_eth_state_accouts_table.sql +++ b/db/migrations/00010_create_eth_state_accouts_table.sql @@ -1,7 +1,7 @@ -- +goose Up CREATE TABLE eth.state_accounts ( id SERIAL PRIMARY KEY, - state_id INTEGER NOT NULL REFERENCES eth.state_cids (id) ON DELETE CASCADE, + state_id INTEGER NOT NULL REFERENCES eth.state_cids (id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, balance NUMERIC NOT NULL, nonce INTEGER NOT NULL, code_hash BYTEA NOT NULL, diff --git a/db/migrations/00011_create_postgraphile_comments.sql b/db/migrations/00011_create_postgraphile_comments.sql index c426efd1..16a051f4 100644 --- a/db/migrations/00011_create_postgraphile_comments.sql +++ b/db/migrations/00011_create_postgraphile_comments.sql @@ -1,9 +1,6 @@ -- +goose Up COMMENT ON TABLE public.nodes IS E'@name NodeInfo'; -COMMENT ON TABLE btc.header_cids IS E'@name BtcHeaderCids'; -COMMENT ON TABLE btc.transaction_cids IS E'@name BtcTransactionCids'; COMMENT ON TABLE eth.transaction_cids IS E'@name EthTransactionCids'; COMMENT ON TABLE eth.header_cids IS E'@name EthHeaderCids'; COMMENT ON COLUMN public.nodes.node_id IS E'@name ChainNodeID'; COMMENT ON COLUMN eth.header_cids.node_id IS E'@name EthNodeID'; -COMMENT ON COLUMN btc.header_cids.node_id IS E'@name BtcNodeID'; \ No newline at end of file diff --git a/db/schema.sql b/db/schema.sql index 0f167099..709bb012 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -16,13 +16,6 @@ SET xmloption 
= content; SET client_min_messages = warning; SET row_security = off; --- --- Name: btc; Type: SCHEMA; Schema: -; Owner: - --- - -CREATE SCHEMA btc; - - -- -- Name: eth; Type: SCHEMA; Schema: -; Owner: - -- @@ -34,172 +27,6 @@ SET default_tablespace = ''; SET default_table_access_method = heap; --- --- Name: header_cids; Type: TABLE; Schema: btc; Owner: - --- - -CREATE TABLE btc.header_cids ( - id integer NOT NULL, - block_number bigint NOT NULL, - block_hash character varying(66) NOT NULL, - parent_hash character varying(66) NOT NULL, - cid text NOT NULL, - mh_key text NOT NULL, - "timestamp" numeric NOT NULL, - bits bigint NOT NULL, - node_id integer NOT NULL, - times_validated integer DEFAULT 1 NOT NULL -); - - --- --- Name: TABLE header_cids; Type: COMMENT; Schema: btc; Owner: - --- - -COMMENT ON TABLE btc.header_cids IS '@name BtcHeaderCids'; - - --- --- Name: COLUMN header_cids.node_id; Type: COMMENT; Schema: btc; Owner: - --- - -COMMENT ON COLUMN btc.header_cids.node_id IS '@name BtcNodeID'; - - --- --- Name: header_cids_id_seq; Type: SEQUENCE; Schema: btc; Owner: - --- - -CREATE SEQUENCE btc.header_cids_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: header_cids_id_seq; Type: SEQUENCE OWNED BY; Schema: btc; Owner: - --- - -ALTER SEQUENCE btc.header_cids_id_seq OWNED BY btc.header_cids.id; - - --- --- Name: transaction_cids; Type: TABLE; Schema: btc; Owner: - --- - -CREATE TABLE btc.transaction_cids ( - id integer NOT NULL, - header_id integer NOT NULL, - index integer NOT NULL, - tx_hash character varying(66) NOT NULL, - cid text NOT NULL, - mh_key text NOT NULL, - segwit boolean NOT NULL, - witness_hash character varying(66) -); - - --- --- Name: TABLE transaction_cids; Type: COMMENT; Schema: btc; Owner: - --- - -COMMENT ON TABLE btc.transaction_cids IS '@name BtcTransactionCids'; - - --- --- Name: transaction_cids_id_seq; Type: SEQUENCE; Schema: btc; Owner: - --- - -CREATE SEQUENCE btc.transaction_cids_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: transaction_cids_id_seq; Type: SEQUENCE OWNED BY; Schema: btc; Owner: - --- - -ALTER SEQUENCE btc.transaction_cids_id_seq OWNED BY btc.transaction_cids.id; - - --- --- Name: tx_inputs; Type: TABLE; Schema: btc; Owner: - --- - -CREATE TABLE btc.tx_inputs ( - id integer NOT NULL, - tx_id integer NOT NULL, - index integer NOT NULL, - witness character varying[], - sig_script bytea NOT NULL, - outpoint_tx_hash character varying(66) NOT NULL, - outpoint_index numeric NOT NULL -); - - --- --- Name: tx_inputs_id_seq; Type: SEQUENCE; Schema: btc; Owner: - --- - -CREATE SEQUENCE btc.tx_inputs_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: tx_inputs_id_seq; Type: SEQUENCE OWNED BY; Schema: btc; Owner: - --- - -ALTER SEQUENCE btc.tx_inputs_id_seq OWNED BY btc.tx_inputs.id; - - --- --- Name: tx_outputs; Type: TABLE; Schema: btc; Owner: - --- - -CREATE TABLE btc.tx_outputs ( - id integer NOT NULL, - tx_id integer NOT NULL, - index integer NOT NULL, - value bigint NOT NULL, - pk_script bytea NOT NULL, - script_class integer NOT NULL, - addresses character varying(66)[], - required_sigs integer NOT NULL -); - - --- --- Name: tx_outputs_id_seq; Type: SEQUENCE; Schema: btc; Owner: - --- - -CREATE SEQUENCE btc.tx_outputs_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: tx_outputs_id_seq; Type: SEQUENCE OWNED BY; 
Schema: btc; Owner: - --- - -ALTER SEQUENCE btc.tx_outputs_id_seq OWNED BY btc.tx_outputs.id; - - -- -- Name: header_cids; Type: TABLE; Schema: eth; Owner: - -- @@ -342,7 +169,7 @@ CREATE TABLE eth.state_cids ( cid text NOT NULL, mh_key text NOT NULL, state_path bytea, - node_type integer, + node_type integer NOT NULL, diff boolean DEFAULT false NOT NULL ); @@ -572,34 +399,6 @@ CREATE SEQUENCE public.nodes_id_seq ALTER SEQUENCE public.nodes_id_seq OWNED BY public.nodes.id; --- --- Name: header_cids id; Type: DEFAULT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.header_cids ALTER COLUMN id SET DEFAULT nextval('btc.header_cids_id_seq'::regclass); - - --- --- Name: transaction_cids id; Type: DEFAULT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.transaction_cids ALTER COLUMN id SET DEFAULT nextval('btc.transaction_cids_id_seq'::regclass); - - --- --- Name: tx_inputs id; Type: DEFAULT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.tx_inputs ALTER COLUMN id SET DEFAULT nextval('btc.tx_inputs_id_seq'::regclass); - - --- --- Name: tx_outputs id; Type: DEFAULT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.tx_outputs ALTER COLUMN id SET DEFAULT nextval('btc.tx_outputs_id_seq'::regclass); - - -- -- Name: header_cids id; Type: DEFAULT; Schema: eth; Owner: - -- @@ -663,70 +462,6 @@ ALTER TABLE ONLY public.goose_db_version ALTER COLUMN id SET DEFAULT nextval('pu ALTER TABLE ONLY public.nodes ALTER COLUMN id SET DEFAULT nextval('public.nodes_id_seq'::regclass); --- --- Name: header_cids header_cids_block_number_block_hash_key; Type: CONSTRAINT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.header_cids - ADD CONSTRAINT header_cids_block_number_block_hash_key UNIQUE (block_number, block_hash); - - --- --- Name: header_cids header_cids_pkey; Type: CONSTRAINT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.header_cids - ADD CONSTRAINT header_cids_pkey PRIMARY KEY (id); - - --- --- Name: transaction_cids transaction_cids_pkey; Type: CONSTRAINT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.transaction_cids - ADD CONSTRAINT transaction_cids_pkey PRIMARY KEY (id); - - --- --- Name: transaction_cids transaction_cids_tx_hash_key; Type: CONSTRAINT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.transaction_cids - ADD CONSTRAINT transaction_cids_tx_hash_key UNIQUE (tx_hash); - - --- --- Name: tx_inputs tx_inputs_pkey; Type: CONSTRAINT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.tx_inputs - ADD CONSTRAINT tx_inputs_pkey PRIMARY KEY (id); - - --- --- Name: tx_inputs tx_inputs_tx_id_index_key; Type: CONSTRAINT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.tx_inputs - ADD CONSTRAINT tx_inputs_tx_id_index_key UNIQUE (tx_id, index); - - --- --- Name: tx_outputs tx_outputs_pkey; Type: CONSTRAINT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.tx_outputs - ADD CONSTRAINT tx_outputs_pkey PRIMARY KEY (id); - - --- --- Name: tx_outputs tx_outputs_tx_id_index_key; Type: CONSTRAINT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.tx_outputs - ADD CONSTRAINT tx_outputs_tx_id_index_key UNIQUE (tx_id, index); - - -- -- Name: header_cids header_cids_block_number_block_hash_key; Type: CONSTRAINT; Schema: eth; Owner: - -- @@ -871,54 +606,6 @@ ALTER TABLE ONLY public.nodes ADD CONSTRAINT nodes_pkey PRIMARY KEY (id); --- --- Name: header_cids header_cids_mh_key_fkey; Type: FK CONSTRAINT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.header_cids - ADD CONSTRAINT header_cids_mh_key_fkey FOREIGN KEY (mh_key) REFERENCES public.blocks(key) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED; 
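A note on the `DEFERRABLE INITIALLY DEFERRED` clauses used throughout these constraints (and newly added to the `eth.state_accounts` foreign key in this patch): they postpone the foreign-key check until the transaction commits, so parent and child rows can be inserted in either order within one transaction. A minimal sketch with hypothetical tables, not part of this schema:

```sql
-- hypothetical parent/child tables illustrating a deferred foreign key
CREATE TABLE parent (id integer PRIMARY KEY);
CREATE TABLE child (
    id        integer PRIMARY KEY,
    parent_id integer NOT NULL REFERENCES parent (id) DEFERRABLE INITIALLY DEFERRED
);

BEGIN;
-- the child row references a parent that does not exist yet;
-- because the constraint is deferred, this does not error immediately
INSERT INTO child (id, parent_id) VALUES (1, 42);
INSERT INTO parent (id) VALUES (42);
COMMIT; -- the foreign key is checked here, after both rows exist
```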
- - --- --- Name: header_cids header_cids_node_id_fkey; Type: FK CONSTRAINT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.header_cids - ADD CONSTRAINT header_cids_node_id_fkey FOREIGN KEY (node_id) REFERENCES public.nodes(id) ON DELETE CASCADE; - - --- --- Name: transaction_cids transaction_cids_header_id_fkey; Type: FK CONSTRAINT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.transaction_cids - ADD CONSTRAINT transaction_cids_header_id_fkey FOREIGN KEY (header_id) REFERENCES btc.header_cids(id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED; - - --- --- Name: transaction_cids transaction_cids_mh_key_fkey; Type: FK CONSTRAINT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.transaction_cids - ADD CONSTRAINT transaction_cids_mh_key_fkey FOREIGN KEY (mh_key) REFERENCES public.blocks(key) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED; - - --- --- Name: tx_inputs tx_inputs_tx_id_fkey; Type: FK CONSTRAINT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.tx_inputs - ADD CONSTRAINT tx_inputs_tx_id_fkey FOREIGN KEY (tx_id) REFERENCES btc.transaction_cids(id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED; - - --- --- Name: tx_outputs tx_outputs_tx_id_fkey; Type: FK CONSTRAINT; Schema: btc; Owner: - --- - -ALTER TABLE ONLY btc.tx_outputs - ADD CONSTRAINT tx_outputs_tx_id_fkey FOREIGN KEY (tx_id) REFERENCES btc.transaction_cids(id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED; - - -- -- Name: header_cids header_cids_mh_key_fkey; Type: FK CONSTRAINT; Schema: eth; Owner: - -- @@ -956,7 +643,7 @@ ALTER TABLE ONLY eth.receipt_cids -- ALTER TABLE ONLY eth.state_accounts - ADD CONSTRAINT state_accounts_state_id_fkey FOREIGN KEY (state_id) REFERENCES eth.state_cids(id) ON DELETE CASCADE; + ADD CONSTRAINT state_accounts_state_id_fkey FOREIGN KEY (state_id) REFERENCES eth.state_cids(id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED; -- From dbcb2c0cf7e33e729c2a8ef58e744f1473e7c676 Mon Sep 17 00:00:00 2001 From: Ian Norden Date: Wed, 2 Sep 2020 10:19:25 -0500 Subject: [PATCH 09/12] updat dep and serve cmd --- cmd/serve.go | 85 +++++++------- pkg/eth/api.go | 2 +- pkg/eth/api_test.go | 6 +- pkg/eth/backend.go | 4 +- pkg/eth/cid_retriever.go | 4 +- pkg/eth/cid_retriever_test.go | 210 +--------------------------------- pkg/eth/filterer.go | 6 +- pkg/eth/filterer_test.go | 4 +- pkg/eth/ipld_fetcher.go | 6 +- pkg/eth/ipld_fetcher_test.go | 6 +- pkg/eth/test_helpers.go | 4 +- pkg/serve/api.go | 8 -- pkg/serve/config.go | 9 +- pkg/serve/service.go | 10 +- pkg/shared/env.go | 2 +- pkg/shared/functions.go | 2 +- pkg/shared/test_helpers.go | 8 +- test_config/test_config.go | 2 +- 18 files changed, 82 insertions(+), 296 deletions(-) diff --git a/cmd/serve.go b/cmd/serve.go index e0ab26e9..c3d6b4a1 100644 --- a/cmd/serve.go +++ b/cmd/serve.go @@ -18,22 +18,22 @@ package cmd import ( "os" "os/signal" - s "sync" + "sync" "github.com/ethereum/go-ethereum/rpc" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" "github.com/spf13/viper" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" + "github.com/vulcanize/ipld-eth-indexer/pkg/eth" - "github.com/vulcanize/ipld-eth-server/pkg/serve" + s "github.com/vulcanize/ipld-eth-server/pkg/serve" v "github.com/vulcanize/ipld-eth-server/version" ) -// watchCmd represents the watch command -var watchCmd = &cobra.Command{ - Use: "watch", +// serveCmd represents the serve command +var serveCmd = &cobra.Command{ + Use: "serve", Short: "serve chain data from PG-IPFS", Long: `This command configures a VulcanizeDB ipld-eth-server. 
@@ -41,82 +41,81 @@ var watchCmd = &cobra.Command{ Run: func(cmd *cobra.Command, args []string) { subCommand = cmd.CalledAs() logWithCommand = *log.WithField("SubCommand", subCommand) - watch() + serve() }, } -func watch() { +func serve() { logWithCommand.Infof("running ipld-eth-server version: %s", v.VersionWithMeta) var forwardPayloadChan chan eth.ConvertedPayload - wg := new(s.WaitGroup) - logWithCommand.Debug("loading watcher configuration variables") - watcherConfig, err := serve.NewConfig() + wg := new(sync.WaitGroup) + logWithCommand.Debug("loading server configuration variables") + serverConfig, err := s.NewConfig() if err != nil { logWithCommand.Fatal(err) } - logWithCommand.Infof("watcher config: %+v", watcherConfig) - logWithCommand.Debug("initializing new watcher service") - s, err := serve.NewServer(watcherConfig) + logWithCommand.Infof("server config: %+v", serverConfig) + logWithCommand.Debug("initializing new server service") + server, err := s.NewServer(serverConfig) if err != nil { logWithCommand.Fatal(err) } - logWithCommand.Info("starting up watcher servers") - forwardPayloadChan = make(chan eth.ConvertedPayload, serve.PayloadChanBufferSize) - s.Serve(wg, forwardPayloadChan) - if err := startServers(s, watcherConfig); err != nil { + logWithCommand.Info("starting up server servers") + forwardPayloadChan = make(chan eth.ConvertedPayload, s.PayloadChanBufferSize) + server.Serve(wg, forwardPayloadChan) + if err := startServers(server, serverConfig); err != nil { logWithCommand.Fatal(err) } - shutdown := make(chan os.Signal) signal.Notify(shutdown, os.Interrupt) <-shutdown - s.Stop() + server.Stop() wg.Wait() } -func startServers(watcher serve.Server, settings *serve.Config) error { +func startServers(server s.Server, settings *s.Config) error { logWithCommand.Debug("starting up IPC server") - _, _, err := rpc.StartIPCEndpoint(settings.IPCEndpoint, watcher.APIs()) + _, _, err := rpc.StartIPCEndpoint(settings.IPCEndpoint, server.APIs()) if err != nil { return err } logWithCommand.Debug("starting up WS server") - _, _, err = rpc.StartWSEndpoint(settings.WSEndpoint, watcher.APIs(), []string{"vdb"}, nil, true) + _, _, err = rpc.StartWSEndpoint(settings.WSEndpoint, server.APIs(), []string{"vdb"}, nil, true) if err != nil { return err } logWithCommand.Debug("starting up HTTP server") - _, _, err = rpc.StartHTTPEndpoint(settings.HTTPEndpoint, watcher.APIs(), []string{"eth"}, nil, nil, rpc.HTTPTimeouts{}) + _, _, err = rpc.StartHTTPEndpoint(settings.HTTPEndpoint, server.APIs(), []string{"eth"}, nil, nil, rpc.HTTPTimeouts{}) return err } func init() { - rootCmd.AddCommand(watchCmd) + rootCmd.AddCommand(serveCmd) // flags for all config variables - watchCmd.PersistentFlags().String("watcher-ws-path", "", "vdb server ws path") - watchCmd.PersistentFlags().String("watcher-http-path", "", "vdb server http path") - watchCmd.PersistentFlags().String("watcher-ipc-path", "", "vdb server ipc path") + serveCmd.PersistentFlags().String("server-ws-path", "", "vdb server ws path") + serveCmd.PersistentFlags().String("server-http-path", "", "vdb server http path") + serveCmd.PersistentFlags().String("server-ipc-path", "", "vdb server ipc path") - watchCmd.PersistentFlags().String("eth-ws-path", "", "ws url for ethereum node") - watchCmd.PersistentFlags().String("eth-http-path", "", "http url for ethereum node") - watchCmd.PersistentFlags().String("eth-node-id", "", "eth node id") - watchCmd.PersistentFlags().String("eth-client-name", "", "eth client name") - 
watchCmd.PersistentFlags().String("eth-genesis-block", "", "eth genesis block hash") - watchCmd.PersistentFlags().String("eth-network-id", "", "eth network id") + serveCmd.PersistentFlags().String("eth-ws-path", "", "ws url for ethereum node") + serveCmd.PersistentFlags().String("eth-http-path", "", "http url for ethereum node") + serveCmd.PersistentFlags().String("eth-node-id", "", "eth node id") + serveCmd.PersistentFlags().String("eth-client-name", "", "eth client name") + serveCmd.PersistentFlags().String("eth-genesis-block", "", "eth genesis block hash") + serveCmd.PersistentFlags().String("eth-network-id", "", "eth network id") // and their bindings - viper.BindPFlag("watcher.wsPath", watchCmd.PersistentFlags().Lookup("watcher-ws-path")) - viper.BindPFlag("watcher.httpPath", watchCmd.PersistentFlags().Lookup("watcher-http-path")) - viper.BindPFlag("watcher.ipcPath", watchCmd.PersistentFlags().Lookup("watcher-ipc-path")) + viper.BindPFlag("server.wsPath", serveCmd.PersistentFlags().Lookup("server-ws-path")) + viper.BindPFlag("server.httpPath", serveCmd.PersistentFlags().Lookup("server-http-path")) + viper.BindPFlag("server.ipcPath", serveCmd.PersistentFlags().Lookup("server-ipc-path")) - viper.BindPFlag("ethereum.wsPath", watchCmd.PersistentFlags().Lookup("eth-ws-path")) - viper.BindPFlag("ethereum.httpPath", watchCmd.PersistentFlags().Lookup("eth-http-path")) - viper.BindPFlag("ethereum.nodeID", watchCmd.PersistentFlags().Lookup("eth-node-id")) - viper.BindPFlag("ethereum.clientName", watchCmd.PersistentFlags().Lookup("eth-client-name")) - viper.BindPFlag("ethereum.genesisBlock", watchCmd.PersistentFlags().Lookup("eth-genesis-block")) - viper.BindPFlag("ethereum.networkID", watchCmd.PersistentFlags().Lookup("eth-network-id")) + viper.BindPFlag("ethereum.wsPath", serveCmd.PersistentFlags().Lookup("eth-ws-path")) + viper.BindPFlag("ethereum.httpPath", serveCmd.PersistentFlags().Lookup("eth-http-path")) + viper.BindPFlag("ethereum.nodeID", serveCmd.PersistentFlags().Lookup("eth-node-id")) + viper.BindPFlag("ethereum.clientName", serveCmd.PersistentFlags().Lookup("eth-client-name")) + viper.BindPFlag("ethereum.genesisBlock", serveCmd.PersistentFlags().Lookup("eth-genesis-block")) + viper.BindPFlag("ethereum.networkID", serveCmd.PersistentFlags().Lookup("eth-network-id")) } diff --git a/pkg/eth/api.go b/pkg/eth/api.go index 491a13a3..a2b6b4ff 100644 --- a/pkg/eth/api.go +++ b/pkg/eth/api.go @@ -20,7 +20,7 @@ import ( "context" "math/big" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" + "github.com/vulcanize/ipld-eth-indexer/pkg/eth" "github.com/vulcanize/ipld-eth-server/pkg/shared" "github.com/ethereum/go-ethereum" diff --git a/pkg/eth/api_test.go b/pkg/eth/api_test.go index e48fff48..fc4a67fc 100644 --- a/pkg/eth/api_test.go +++ b/pkg/eth/api_test.go @@ -29,9 +29,9 @@ import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - eth2 "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth/mocks" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" + eth2 "github.com/vulcanize/ipld-eth-indexer/pkg/eth" + "github.com/vulcanize/ipld-eth-indexer/pkg/eth/mocks" + "github.com/vulcanize/ipld-eth-indexer/pkg/postgres" "github.com/vulcanize/ipld-eth-server/pkg/eth" "github.com/vulcanize/ipld-eth-server/pkg/shared" diff --git a/pkg/eth/backend.go b/pkg/eth/backend.go index cf972e15..6b3e9eb2 100644 --- a/pkg/eth/backend.go +++ b/pkg/eth/backend.go @@ -30,8 +30,8 @@ import ( "github.com/ethereum/go-ethereum/rpc" ipfsethdb "github.com/vulcanize/pg-ipfs-ethdb" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" + "github.com/vulcanize/ipld-eth-indexer/pkg/ipfs" + "github.com/vulcanize/ipld-eth-indexer/pkg/postgres" "github.com/vulcanize/ipld-eth-server/pkg/shared" ) diff --git a/pkg/eth/cid_retriever.go b/pkg/eth/cid_retriever.go index 3d83f62e..7ae49a00 100644 --- a/pkg/eth/cid_retriever.go +++ b/pkg/eth/cid_retriever.go @@ -26,8 +26,8 @@ import ( "github.com/lib/pq" log "github.com/sirupsen/logrus" - eth2 "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" + eth2 "github.com/vulcanize/ipld-eth-indexer/pkg/eth" + "github.com/vulcanize/ipld-eth-indexer/pkg/postgres" "github.com/vulcanize/ipld-eth-server/pkg/shared" ) diff --git a/pkg/eth/cid_retriever_test.go b/pkg/eth/cid_retriever_test.go index 24ef1e7f..555d7fec 100644 --- a/pkg/eth/cid_retriever_test.go +++ b/pkg/eth/cid_retriever_test.go @@ -24,9 +24,9 @@ import ( . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - eth2 "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth/mocks" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" + eth2 "github.com/vulcanize/ipld-eth-indexer/pkg/eth" + "github.com/vulcanize/ipld-eth-indexer/pkg/eth/mocks" + "github.com/vulcanize/ipld-eth-indexer/pkg/postgres" "github.com/vulcanize/ipld-eth-server/pkg/eth" "github.com/vulcanize/ipld-eth-server/pkg/shared" @@ -465,210 +465,6 @@ var _ = Describe("Retriever", func() { Expect(num).To(Equal(int64(1010101))) }) }) - - Describe("RetrieveGapsInData", func() { - It("Doesn't return gaps if there are none", func() { - payload0 := mocks.MockConvertedPayload - payload0.Block = newMockBlock(0) - payload1 := mocks.MockConvertedPayload - payload2 := payload1 - payload2.Block = newMockBlock(2) - payload3 := payload2 - payload3.Block = newMockBlock(3) - err := repo.Publish(payload0) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload1) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload2) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload3) - Expect(err).ToNot(HaveOccurred()) - gaps, err := retriever.RetrieveGapsInData(1) - Expect(err).ToNot(HaveOccurred()) - Expect(len(gaps)).To(Equal(0)) - }) - - It("Returns the gap from 0 to the earliest block", func() { - payload := mocks.MockConvertedPayload - payload.Block = newMockBlock(5) - err := repo.Publish(payload) - Expect(err).ToNot(HaveOccurred()) - gaps, err := retriever.RetrieveGapsInData(1) - Expect(err).ToNot(HaveOccurred()) - Expect(len(gaps)).To(Equal(1)) - Expect(gaps[0].Start).To(Equal(uint64(0))) - Expect(gaps[0].Stop).To(Equal(uint64(4))) - }) - - It("Can handle single block gaps", func() { - payload0 := 
mocks.MockConvertedPayload - payload0.Block = newMockBlock(0) - payload1 := mocks.MockConvertedPayload - payload3 := payload1 - payload3.Block = newMockBlock(3) - err := repo.Publish(payload0) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload1) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload3) - Expect(err).ToNot(HaveOccurred()) - gaps, err := retriever.RetrieveGapsInData(1) - Expect(err).ToNot(HaveOccurred()) - Expect(len(gaps)).To(Equal(1)) - Expect(gaps[0].Start).To(Equal(uint64(2))) - Expect(gaps[0].Stop).To(Equal(uint64(2))) - }) - - It("Finds gap between two entries", func() { - payload1 := mocks.MockConvertedPayload - payload1.Block = newMockBlock(1010101) - payload2 := payload1 - payload2.Block = newMockBlock(0) - err := repo.Publish(payload1) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload2) - Expect(err).ToNot(HaveOccurred()) - gaps, err := retriever.RetrieveGapsInData(1) - Expect(err).ToNot(HaveOccurred()) - Expect(len(gaps)).To(Equal(1)) - Expect(gaps[0].Start).To(Equal(uint64(1))) - Expect(gaps[0].Stop).To(Equal(uint64(1010100))) - }) - - It("Finds gaps between multiple entries", func() { - payload1 := mocks.MockConvertedPayload - payload1.Block = newMockBlock(1010101) - payload2 := mocks.MockConvertedPayload - payload2.Block = newMockBlock(1) - payload3 := mocks.MockConvertedPayload - payload3.Block = newMockBlock(5) - payload4 := mocks.MockConvertedPayload - payload4.Block = newMockBlock(100) - payload5 := mocks.MockConvertedPayload - payload5.Block = newMockBlock(101) - payload6 := mocks.MockConvertedPayload - payload6.Block = newMockBlock(102) - payload7 := mocks.MockConvertedPayload - payload7.Block = newMockBlock(103) - payload8 := mocks.MockConvertedPayload - payload8.Block = newMockBlock(104) - payload9 := mocks.MockConvertedPayload - payload9.Block = newMockBlock(105) - payload10 := mocks.MockConvertedPayload - payload10.Block = newMockBlock(106) - payload11 := mocks.MockConvertedPayload - payload11.Block = newMockBlock(1000) - - err := repo.Publish(payload1) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload2) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload3) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload4) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload5) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload6) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload7) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload8) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload9) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload10) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload11) - Expect(err).ToNot(HaveOccurred()) - - gaps, err := retriever.RetrieveGapsInData(1) - Expect(err).ToNot(HaveOccurred()) - Expect(len(gaps)).To(Equal(5)) - Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 0, Stop: 0})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 2, Stop: 4})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 6, Stop: 99})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 107, Stop: 999})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 1001, Stop: 1010100})).To(BeTrue()) - }) - - It("Finds validation level gaps", func() { - - payload1 := mocks.MockConvertedPayload - payload1.Block = newMockBlock(1010101) - payload2 := mocks.MockConvertedPayload - payload2.Block = newMockBlock(1) - payload3 := 
mocks.MockConvertedPayload - payload3.Block = newMockBlock(5) - payload4 := mocks.MockConvertedPayload - payload4.Block = newMockBlock(100) - payload5 := mocks.MockConvertedPayload - payload5.Block = newMockBlock(101) - payload6 := mocks.MockConvertedPayload - payload6.Block = newMockBlock(102) - payload7 := mocks.MockConvertedPayload - payload7.Block = newMockBlock(103) - payload8 := mocks.MockConvertedPayload - payload8.Block = newMockBlock(104) - payload9 := mocks.MockConvertedPayload - payload9.Block = newMockBlock(105) - payload10 := mocks.MockConvertedPayload - payload10.Block = newMockBlock(106) - payload11 := mocks.MockConvertedPayload - payload11.Block = newMockBlock(107) - payload12 := mocks.MockConvertedPayload - payload12.Block = newMockBlock(108) - payload13 := mocks.MockConvertedPayload - payload13.Block = newMockBlock(109) - payload14 := mocks.MockConvertedPayload - payload14.Block = newMockBlock(1000) - - err := repo.Publish(payload1) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload2) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload3) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload4) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload5) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload6) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload7) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload8) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload9) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload10) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload11) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload12) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload13) - Expect(err).ToNot(HaveOccurred()) - err = repo.Publish(payload14) - Expect(err).ToNot(HaveOccurred()) - - cleaner := eth2.NewDBCleaner(db) - err = cleaner.ResetValidation([][2]uint64{{101, 102}, {104, 104}, {106, 108}}) - Expect(err).ToNot(HaveOccurred()) - - gaps, err := retriever.RetrieveGapsInData(1) - Expect(err).ToNot(HaveOccurred()) - Expect(len(gaps)).To(Equal(8)) - Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 0, Stop: 0})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 2, Stop: 4})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 6, Stop: 99})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 101, Stop: 102})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 104, Stop: 104})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 106, Stop: 108})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 110, Stop: 999})).To(BeTrue()) - Expect(shared.ListContainsGap(gaps, eth2.DBGap{Start: 1001, Stop: 1010100})).To(BeTrue()) - }) - }) }) func newMockBlock(blockNumber uint64) *types.Block { diff --git a/pkg/eth/filterer.go b/pkg/eth/filterer.go index 8f41833c..eaea466c 100644 --- a/pkg/eth/filterer.go +++ b/pkg/eth/filterer.go @@ -26,9 +26,9 @@ import ( "github.com/ethereum/go-ethereum/statediff" "github.com/multiformats/go-multihash" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs/ipld" + "github.com/vulcanize/ipld-eth-indexer/pkg/eth" + "github.com/vulcanize/ipld-eth-indexer/pkg/ipfs" + "github.com/vulcanize/ipld-eth-indexer/pkg/ipfs/ipld" ) // Filterer interface for substituing mocks in tests diff --git 
a/pkg/eth/filterer_test.go b/pkg/eth/filterer_test.go index 41cbcf93..9495a010 100644 --- a/pkg/eth/filterer_test.go +++ b/pkg/eth/filterer_test.go @@ -23,8 +23,8 @@ import ( . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth/mocks" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs" + "github.com/vulcanize/ipld-eth-indexer/pkg/eth/mocks" + "github.com/vulcanize/ipld-eth-indexer/pkg/ipfs" "github.com/vulcanize/ipld-eth-server/pkg/eth" "github.com/vulcanize/ipld-eth-server/pkg/shared" diff --git a/pkg/eth/ipld_fetcher.go b/pkg/eth/ipld_fetcher.go index 0ed3df6c..c5ca2f8b 100644 --- a/pkg/eth/ipld_fetcher.go +++ b/pkg/eth/ipld_fetcher.go @@ -25,9 +25,9 @@ import ( "github.com/jmoiron/sqlx" log "github.com/sirupsen/logrus" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" + "github.com/vulcanize/ipld-eth-indexer/pkg/eth" + "github.com/vulcanize/ipld-eth-indexer/pkg/ipfs" + "github.com/vulcanize/ipld-eth-indexer/pkg/postgres" "github.com/vulcanize/ipld-eth-server/pkg/shared" ) diff --git a/pkg/eth/ipld_fetcher_test.go b/pkg/eth/ipld_fetcher_test.go index a370c774..04761289 100644 --- a/pkg/eth/ipld_fetcher_test.go +++ b/pkg/eth/ipld_fetcher_test.go @@ -20,9 +20,9 @@ import ( . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - eth2 "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth/mocks" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" + eth2 "github.com/vulcanize/ipld-eth-indexer/pkg/eth" + "github.com/vulcanize/ipld-eth-indexer/pkg/eth/mocks" + "github.com/vulcanize/ipld-eth-indexer/pkg/postgres" "github.com/vulcanize/ipld-eth-server/pkg/eth" "github.com/vulcanize/ipld-eth-server/pkg/shared" diff --git a/pkg/eth/test_helpers.go b/pkg/eth/test_helpers.go index 48fd71a2..c39ec5c3 100644 --- a/pkg/eth/test_helpers.go +++ b/pkg/eth/test_helpers.go @@ -19,8 +19,8 @@ package eth import ( . 
"github.com/onsi/gomega" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" + "github.com/vulcanize/ipld-eth-indexer/pkg/eth" + "github.com/vulcanize/ipld-eth-indexer/pkg/postgres" ) // TearDownDB is used to tear down the watcher dbs after tests diff --git a/pkg/serve/api.go b/pkg/serve/api.go index 0e2aaa4d..7b2e2382 100644 --- a/pkg/serve/api.go +++ b/pkg/serve/api.go @@ -23,8 +23,6 @@ import ( "github.com/ethereum/go-ethereum/rpc" log "github.com/sirupsen/logrus" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/node" - "github.com/vulcanize/ipld-eth-server/pkg/eth" "github.com/vulcanize/ipld-eth-server/pkg/shared" v "github.com/vulcanize/ipld-eth-server/version" @@ -87,12 +85,6 @@ func (api *PublicServerAPI) Stream(ctx context.Context, params eth.SubscriptionS return rpcSub, nil } -// Node is a public rpc method to allow transformers to fetch the node info for the watcher -// NOTE: this is the node info for the node that the watcher is syncing from, not the node info for the watcher itself -func (api *PublicServerAPI) Node() *node.Info { - return api.w.Node() -} - // Chain returns the chain type that this watcher instance supports func (api *PublicServerAPI) Chain() shared.ChainType { return api.w.Chain() diff --git a/pkg/serve/config.go b/pkg/serve/config.go index 0a2a0197..382d533f 100644 --- a/pkg/serve/config.go +++ b/pkg/serve/config.go @@ -20,12 +20,13 @@ import ( "os" "path/filepath" + "github.com/vulcanize/ipld-eth-indexer/pkg/node" + "github.com/spf13/viper" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/node" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" + "github.com/vulcanize/ipld-eth-indexer/pkg/postgres" - "github.com/vulcanize/ipld-eth-server/utils" + "github.com/vulcanize/ipld-eth-indexer/utils" ) // Env variables @@ -79,7 +80,7 @@ func NewConfig() (*Config, error) { } c.HTTPEndpoint = httpPath overrideDBConnConfig(&c.DBConfig) - serveDB := utils.LoadPostgres(c.DBConfig, postgres.Info{}) + serveDB := utils.LoadPostgres(c.DBConfig, node.Info{}) c.DB = &serveDB return c, nil diff --git a/pkg/serve/service.go b/pkg/serve/service.go index 3226d6e2..34b39f6d 100644 --- a/pkg/serve/service.go +++ b/pkg/serve/service.go @@ -28,9 +28,8 @@ import ( "github.com/ethereum/go-ethereum/rpc" log "github.com/sirupsen/logrus" - eth2 "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/node" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" + eth2 "github.com/vulcanize/ipld-eth-indexer/pkg/eth" + "github.com/vulcanize/ipld-eth-indexer/pkg/postgres" "github.com/vulcanize/ipld-eth-server/pkg/eth" "github.com/vulcanize/ipld-eth-server/pkg/shared" @@ -88,7 +87,6 @@ func NewServer(settings *Config) (Server, error) { sn.QuitChan = make(chan bool) sn.Subscriptions = make(map[common.Hash]map[rpc.ID]Subscription) sn.SubscriptionTypes = make(map[common.Hash]eth.SubscriptionSettings) - sn.NodeInfo = &settings.NodeInfo return sn, nil } @@ -273,7 +271,7 @@ func (sap *Service) sendHistoricalData(sub Subscription, id rpc.ID, params eth.S for i := startingBlock; i <= endingBlock; i++ { select { case <-sap.QuitChan: - log.Infof("%s watcher historical data feed to subscription %s closed", id) + log.Infof("ethereum historical data feed to subscription %s closed", id) return default: } @@ -309,7 +307,7 @@ func (sap *Service) sendHistoricalData(sub Subscription, id rpc.ID, params eth.S case sub.PayloadChan <- SubscriptionPayload{Data: nil, Err: "", Flag: 
BackFillCompleteFlag}: log.Debugf("eth ipld server sending backFill completion notice to subscription %s", id) default: - log.Infof("eth ipld server unable to send backFill completion notice to %s subscription %s", id) + log.Infof("eth ipld server unable to send backFill completion notice to subscription %s", id) } }() return nil diff --git a/pkg/shared/env.go b/pkg/shared/env.go index c14774a4..41c467a0 100644 --- a/pkg/shared/env.go +++ b/pkg/shared/env.go @@ -18,7 +18,7 @@ package shared import ( "github.com/spf13/viper" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/node" + "github.com/vulcanize/ipld-eth-indexer/pkg/node" ) // Env variables diff --git a/pkg/shared/functions.go b/pkg/shared/functions.go index 9b364ec6..3a51e53a 100644 --- a/pkg/shared/functions.go +++ b/pkg/shared/functions.go @@ -24,7 +24,7 @@ import ( node "github.com/ipfs/go-ipld-format" "github.com/jmoiron/sqlx" "github.com/sirupsen/logrus" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs/ipld" + "github.com/vulcanize/ipld-eth-indexer/pkg/ipfs/ipld" ) // HandleZeroAddrPointer will return an emtpy string for a nil address pointer diff --git a/pkg/shared/test_helpers.go b/pkg/shared/test_helpers.go index 38eead8e..9c2dcbb8 100644 --- a/pkg/shared/test_helpers.go +++ b/pkg/shared/test_helpers.go @@ -19,14 +19,14 @@ package shared import ( "bytes" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/eth" + "github.com/vulcanize/ipld-eth-indexer/pkg/eth" "github.com/ipfs/go-cid" "github.com/multiformats/go-multihash" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/ipfs" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/node" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" + "github.com/vulcanize/ipld-eth-indexer/pkg/ipfs" + "github.com/vulcanize/ipld-eth-indexer/pkg/node" + "github.com/vulcanize/ipld-eth-indexer/pkg/postgres" ) // SetupDB is use to setup a db for watcher tests diff --git a/test_config/test_config.go b/test_config/test_config.go index 88678dd9..a2bb2b3b 100644 --- a/test_config/test_config.go +++ b/test_config/test_config.go @@ -22,7 +22,7 @@ import ( "github.com/sirupsen/logrus" "github.com/spf13/viper" - "github.com/vulcanize/ipfs-blockchain-watcher/pkg/postgres" + "github.com/vulcanize/ipld-eth-indexer/pkg/postgres" ) var DBConfig postgres.Config From 065e70185acd004ef05a8a62c3abe8cf3cb47fee Mon Sep 17 00:00:00 2001 From: Ian Norden Date: Wed, 2 Sep 2020 10:22:55 -0500 Subject: [PATCH 10/12] using local dep until we cut release of ipld-eth-indexer --- README.md | 14 +++++++------- go.mod | 12 +++--------- go.sum | 8 -------- 3 files changed, 10 insertions(+), 24 deletions(-) diff --git a/README.md b/README.md index e1782ca4..f6581d71 100644 --- a/README.md +++ b/README.md @@ -79,13 +79,13 @@ TODO: Port the IPLD RPC subscription endpoints after the decoupling #### Ethereum JSON-RPC ipld-eth-server currently recapitulates portions of the Ethereum JSON-RPC api standard. -The currently supported standard endpoints are: -`eth_blockNumber` -`eth_getLogs` -`eth_getHeaderByNumber` -`eth_getBlockByNumber` -`eth_getBlockByHash` -`eth_getTransactionByHash` +The currently supported standard endpoints are: +`eth_blockNumber` +`eth_getLogs` +`eth_getHeaderByNumber` +`eth_getBlockByNumber` +`eth_getBlockByHash` +`eth_getTransactionByHash` TODO: Add the rest of the standard endpoints add unique endpoints (e.g. 
getSlice) diff --git a/go.mod b/go.mod index 19a272db..65156f29 100644 --- a/go.mod +++ b/go.mod @@ -3,17 +3,10 @@ module github.com/vulcanize/ipld-eth-server go 1.13 require ( - github.com/btcsuite/btcd v0.20.1-beta - github.com/btcsuite/btcutil v1.0.2 github.com/ethereum/go-ethereum v1.9.11 - github.com/ipfs/go-block-format v0.0.2 - github.com/ipfs/go-blockservice v0.1.3 github.com/ipfs/go-cid v0.0.5 - github.com/ipfs/go-filestore v1.0.0 // indirect - github.com/ipfs/go-ipfs v0.5.1 github.com/ipfs/go-ipfs-blockstore v1.0.0 github.com/ipfs/go-ipfs-ds-help v1.0.0 - github.com/ipfs/go-ipfs-exchange-interface v0.0.1 github.com/ipfs/go-ipld-format v0.2.0 github.com/jmoiron/sqlx v1.2.0 github.com/lib/pq v1.5.2 @@ -23,9 +16,10 @@ require ( github.com/sirupsen/logrus v1.6.0 github.com/spf13/cobra v1.0.0 github.com/spf13/viper v1.7.0 + github.com/vulcanize/ipld-eth-indexer v0.0.11-alpha github.com/vulcanize/pg-ipfs-ethdb v0.0.1-alpha - golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2 // indirect - golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a // indirect ) replace github.com/ethereum/go-ethereum v1.9.11 => github.com/vulcanize/go-ethereum v1.9.11-statediff-0.0.5 + +replace github.com/vulcanize/ipld-eth-indexer => /Users/iannorden/go/src/github.com/vulcanize/ipld-eth-indexer diff --git a/go.sum b/go.sum index 9982aa34..a0b2b03b 100644 --- a/go.sum +++ b/go.sum @@ -75,19 +75,14 @@ github.com/btcsuite/btcd v0.0.0-20190523000118-16327141da8c/go.mod h1:3J08xEfcug github.com/btcsuite/btcd v0.0.0-20190605094302-a0d1e3e36d50/go.mod h1:3J08xEfcugPacsc34/LKRU2yO7YmuT8yt28J8k2+rrI= github.com/btcsuite/btcd v0.0.0-20190629003639-c26ffa870fd8/go.mod h1:3J08xEfcugPacsc34/LKRU2yO7YmuT8yt28J8k2+rrI= github.com/btcsuite/btcd v0.0.0-20190824003749-130ea5bddde3/go.mod h1:3J08xEfcugPacsc34/LKRU2yO7YmuT8yt28J8k2+rrI= -github.com/btcsuite/btcd v0.20.1-beta h1:Ik4hyJqN8Jfyv3S4AGBOmyouMsYE3EdYODkMbQjwPGw= github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ= -github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f h1:bAs4lUbRJpnnkd9VhRV3jjAVU7DJVjMaK+IsvSeZvFo= github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA= github.com/btcsuite/btcutil v0.0.0-20190207003914-4c204d697803/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= -github.com/btcsuite/btcutil v1.0.2 h1:9iZ1Terx9fMIOtq1VrwdqfsATL9MC2l8ZrUY6YZ2uts= github.com/btcsuite/btcutil v1.0.2/go.mod h1:j9HUFwoQRsZL3V4n+qG+CUnEGHOarIxfC3Le2Yhbcts= -github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd h1:R/opQEbFEy9JGkIguV40SvRY1uliPX8ifOvi6ICsFCw= github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd/go.mod h1:HHNXQzUsZCxOoE+CPiyCTO6x34Zs86zZUiwtpXoGdtg= github.com/btcsuite/goleveldb v0.0.0-20160330041536-7834afc9e8cd/go.mod h1:F+uVaaLLH7j4eDXPRvw78tMflu7Ie2bzYOH4Y8rRKBY= github.com/btcsuite/snappy-go v0.0.0-20151229074030-0bdef8d06723/go.mod h1:8woku9dyThutzjeg+3xrA5iCpBRH8XEEg3lh6TiUghc= -github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792 h1:R8vQdOQdZ9Y3SkEwmHoWBmX1DNXhXZqlTpq6s4tyJGc= github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtEyQwv5/p4Mg4C0fgbePVuGr935/5ddU9Z3TmDRY= github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs= github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23/go.mod 
h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= @@ -932,11 +927,8 @@ github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljT github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/viant/assertly v0.4.8/go.mod h1:aGifi++jvCrUaklKEKT0BU95igDNaqkvz+49uaYMPRU= github.com/viant/toolbox v0.24.0/go.mod h1:OxMCG57V0PXuIP2HNQrtJf2CjqdmbrOx5EkMILuUhzM= -github.com/vulcanize/go-ethereum v1.9.11-statediff-0.0.2 h1:ebv2bWocCmNKGnpHtRjSWoTpkgyEbRBb028PanH43H8= -github.com/vulcanize/go-ethereum v1.9.11-statediff-0.0.2/go.mod h1:7oC0Ni6dosMv5pxMigm6s0hN8g4haJMBnqmmo0D9YfQ= github.com/vulcanize/go-ethereum v1.9.11-statediff-0.0.5 h1:U+BqhjRLR22e9OEm8cgWC3Eq3bh8G6azjNpXeenfCG4= github.com/vulcanize/go-ethereum v1.9.11-statediff-0.0.5/go.mod h1:7oC0Ni6dosMv5pxMigm6s0hN8g4haJMBnqmmo0D9YfQ= -github.com/vulcanize/ipfs-blockchain-watcher v0.0.9 h1:pKL378Wtuhi8HPw3ZqV/3UBgJngUw1Ke4w5GKVM52pY= github.com/vulcanize/pg-ipfs-ethdb v0.0.1-alpha h1:Y7j0Hw1jgVVOg+eUGUr7OgH+gOBID0DwbsfZV1KoL7I= github.com/vulcanize/pg-ipfs-ethdb v0.0.1-alpha/go.mod h1:OuqE4r2LGWAtDVx3s1yaAzDcwy+LEAqrWaE1L8UfrGY= github.com/wangjia184/sortedset v0.0.0-20160527075905-f5d03557ba30/go.mod h1:YkocrP2K2tcw938x9gCOmT5G5eCD6jsTz0SZuyAqwIE= From e917ccb973bbd20bec8e93f63381cf5204885f31 Mon Sep 17 00:00:00 2001 From: Ian Norden Date: Wed, 2 Sep 2020 14:04:36 -0500 Subject: [PATCH 11/12] use v0.2.0-alpha release of eth-ipld-indexer --- go.mod | 4 +--- go.sum | 31 +++++++++++++++++++++++++++++++ 2 files changed, 32 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 65156f29..028423f3 100644 --- a/go.mod +++ b/go.mod @@ -16,10 +16,8 @@ require ( github.com/sirupsen/logrus v1.6.0 github.com/spf13/cobra v1.0.0 github.com/spf13/viper v1.7.0 - github.com/vulcanize/ipld-eth-indexer v0.0.11-alpha + github.com/vulcanize/ipld-eth-indexer v0.2.0-alpha github.com/vulcanize/pg-ipfs-ethdb v0.0.1-alpha ) replace github.com/ethereum/go-ethereum v1.9.11 => github.com/vulcanize/go-ethereum v1.9.11-statediff-0.0.5 - -replace github.com/vulcanize/ipld-eth-indexer => /Users/iannorden/go/src/github.com/vulcanize/ipld-eth-indexer diff --git a/go.sum b/go.sum index a0b2b03b..054a0e26 100644 --- a/go.sum +++ b/go.sum @@ -35,11 +35,13 @@ github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxB github.com/Azure/go-autorest/autorest/mocks v0.3.0/go.mod h1:a8FDP3DYzQ4RYfVAxAN3SVSiiO77gL2j2ronKKP0syM= github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc= github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= +github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/Kubuxu/go-os-helper v0.0.1/go.mod h1:N8B+I7vPCT80IcP58r50u4+gEEcsZETFUpAzWW2ep1Y= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/OneOfOne/xxhash v1.2.5/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q= +github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6 h1:fLjPD/aNc3UIOA6tDi6QXUemppXK3P9BI7mr2hd6gx8= github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg= github.com/Stebalien/go-bitfield v0.0.0-20180330043415-076a62f9ce6e/go.mod 
h1:3oM7gXIttpYDAJXpVNnSCiUMYBLIZ6cb1t+Ip982MRo= github.com/Stebalien/go-bitfield v0.0.1/go.mod h1:GNjFpasyUVkHMsfEOk8EFLJ9syQ6SI+XWrX9Wf2XH0s= @@ -52,6 +54,7 @@ github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuy github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alexbrainman/goissue34681 v0.0.0-20191006012335-3fc7a47baff5/go.mod h1:Y2QMoi1vgtOIfc+6DhrMOGkLoGzqSV2rKp4Sm+opsyA= +github.com/allegro/bigcache v1.2.1-0.20190218064605-e24eb225f156 h1:eMwmnE/GDgah4HI848JfFxHt+iPb26b4zyfspmqY0/8= github.com/allegro/bigcache v1.2.1-0.20190218064605-e24eb225f156/go.mod h1:Cb/ax3seSYIx7SuZdm2G2xzfwmv3TPSk2ucNfQESPXM= github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c= github.com/aristanetworks/goarista v0.0.0-20170210015632-ea17b1a17847 h1:rtI0fD4oG/8eVokGVPYJEW1F88p1ZNgXiEIs9thEE4A= @@ -75,6 +78,7 @@ github.com/btcsuite/btcd v0.0.0-20190523000118-16327141da8c/go.mod h1:3J08xEfcug github.com/btcsuite/btcd v0.0.0-20190605094302-a0d1e3e36d50/go.mod h1:3J08xEfcugPacsc34/LKRU2yO7YmuT8yt28J8k2+rrI= github.com/btcsuite/btcd v0.0.0-20190629003639-c26ffa870fd8/go.mod h1:3J08xEfcugPacsc34/LKRU2yO7YmuT8yt28J8k2+rrI= github.com/btcsuite/btcd v0.0.0-20190824003749-130ea5bddde3/go.mod h1:3J08xEfcugPacsc34/LKRU2yO7YmuT8yt28J8k2+rrI= +github.com/btcsuite/btcd v0.20.1-beta h1:Ik4hyJqN8Jfyv3S4AGBOmyouMsYE3EdYODkMbQjwPGw= github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ= github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA= github.com/btcsuite/btcutil v0.0.0-20190207003914-4c204d697803/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= @@ -87,6 +91,7 @@ github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtE github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs= github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/cespare/cp v0.1.0 h1:SE+dxFebS7Iik5LK0tsi1k9ZCxEaFX4AjQmoyA+1dJk= github.com/cespare/cp v0.1.0/go.mod h1:SOGHArjBr4JWaSDEVpWpo/hNg6RoKrls6Oh40hiwW+s= github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= @@ -158,11 +163,15 @@ github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclK github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0 h1:wDJmvq38kDhkVxi50ni9ykkdUr1PKgqKOoi01fa0Mdk= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0 h1:MP4Eh7ZCb31lleYCFuwm0oe4/YGak+5l1vA2NOE80nA= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-ole/go-ole v1.2.1 
h1:2lOsA72HgjxAuMlKpFiCbHTvu44PIVkZ5hqm3RSdI/E=
 github.com/go-ole/go-ole v1.2.1/go.mod h1:7FAglXiTm7HKlQRDeOQ6ZNUHidzCWXuZWq/1dTyBNF8=
 github.com/go-sourcemap/sourcemap v2.1.2+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
+github.com/go-sql-driver/mysql v1.4.0 h1:7LxgVwFb2hIQtMm87NdgAVfXjnt4OePseqT1tKx+opk=
 github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
 github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk=
 github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
@@ -200,6 +209,7 @@ github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ
 github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
 github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
 github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4=
 github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
 github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
@@ -216,6 +226,7 @@ github.com/googleapis/gax-go/v2 v2.0.3/go.mod h1:LLvjysVCY1JZeum8Z6l8qUty8fiNwE0
 github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
 github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
 github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
+github.com/gopherjs/gopherjs v0.0.0-20190430165422-3e4dfb77656c h1:7lF+Vz0LqiRidnzC1Oq86fpX1q/iEv2KJdrCtttYjT4=
 github.com/gopherjs/gopherjs v0.0.0-20190430165422-3e4dfb77656c/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
 github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
 github.com/gorilla/websocket v1.4.1-0.20190629185528-ae1634f6a989/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
@@ -263,6 +274,7 @@ github.com/huin/goupnp v0.0.0-20180415215157-1395d1447324/go.mod h1:MZ2ZmwcBpvOo
 github.com/huin/goupnp v1.0.0 h1:wg75sLpL6DZqwHQN6E1Cfk6mtfzS45z8OV+ic+DtHRo=
 github.com/huin/goupnp v1.0.0/go.mod h1:n9v9KO1tAxYH82qOn+UTIFQDmx5n1Zxd/ClZDMX7Bnc=
 github.com/huin/goutil v0.0.0-20170803182201-1ca381bf3150/go.mod h1:PpLOETDnJ0o3iZrZfqZzyLl6l7F3c6L1oWn7OICBi6o=
+github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
 github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
 github.com/influxdata/influxdb v1.2.3-0.20180221223340-01288bdb0883/go.mod h1:qZna6X/4elxqT3yI9iZYdZrWWdeFOOprn86kgg4+IzY=
 github.com/ipfs/bbloom v0.0.1/go.mod h1:oqo8CVWsJFMOZqTglBG4wydCE4IQA/G2/SEofB0rjUI=
@@ -427,6 +439,7 @@ github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCV
 github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
 github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
 github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
+github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
 github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
 github.com/julienschmidt/httprouter v1.1.1-0.20170430222011-975b5c4c7c21/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
 github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
@@ -438,14 +451,18 @@ github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQL
 github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
 github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4=
 github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
+github.com/konsorten/go-windows-terminal-sequences v1.0.3 h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8=
 github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
 github.com/koron/go-ssdp v0.0.0-20180514024734-4a0ed625a78b/go.mod h1:5Ky9EC2xfoUKUor0Hjgi2BJhCSXJfMOFlmyYrVKGQMk=
 github.com/koron/go-ssdp v0.0.0-20191105050749-2e1c40ed0b5d/go.mod h1:5Ky9EC2xfoUKUor0Hjgi2BJhCSXJfMOFlmyYrVKGQMk=
+github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515 h1:T+h1c/A9Gawja4Y9mFVWj2vyii2bbUNDw3kt9VxK2EY=
 github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
 github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs=
 github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
 github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
 github.com/kr/pty v1.1.3/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
 github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
@@ -684,6 +701,7 @@ github.com/mattn/go-runewidth v0.0.3/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzp
 github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
 github.com/mattn/go-runewidth v0.0.8 h1:3tS41NlGYSmhhe/8fhGRzc+z3AYCw1Fe1WAyLuujKs0=
 github.com/mattn/go-runewidth v0.0.8/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
+github.com/mattn/go-sqlite3 v1.9.0 h1:pDRiWfl+++eC2FEFRy6jXmQlvp4Yh3z1MJKg4UeYM/4=
 github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
 github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
 github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
@@ -799,6 +817,7 @@ github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE
 github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
 github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/polydawn/refmt v0.0.0-20190221155625-df39d6c2d992/go.mod h1:uIp+gprXxxrWSjjklXD+mN4wed/tMfjMMmN/9+JsA9o=
 github.com/polydawn/refmt v0.0.0-20190408063855-01bf1e26dd14/go.mod h1:uIp+gprXxxrWSjjklXD+mN4wed/tMfjMMmN/9+JsA9o=
@@ -868,10 +887,12 @@ github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6Mwd
 github.com/sirupsen/logrus v1.6.0 h1:UBcNElsrwanuuMsnGSlYmtmgbb23qDR5dG+6X6Oo89I=
 github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
 github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
+github.com/smartystreets/assertions v1.0.0 h1:UVQPSSmc3qtTi+zPPkCXvZX9VvW/xT/NsRvKfwY81a8=
 github.com/smartystreets/assertions v1.0.0/go.mod h1:kHHU4qYBaI3q23Pp3VPrmWhuIUrLW/7eUrw0BU5VaoM=
 github.com/smartystreets/goconvey v0.0.0-20190222223459-a17d461953aa/go.mod h1:2RVY1rIf+2J2o/IM9+vPq9RzmHDSseB7FoXiSNIUsoU=
 github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
 github.com/smartystreets/goconvey v0.0.0-20190710185942-9d28bd7c0945/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
+github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=
 github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
 github.com/smola/gocompat v0.2.0/go.mod h1:1B0MlxbmoZNo3h8guHp8HztB3BSYR5itql9qtVc0ypY=
 github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
@@ -910,6 +931,7 @@ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+
 github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
 github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
 github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
 github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
 github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
@@ -929,6 +951,8 @@ github.com/viant/assertly v0.4.8/go.mod h1:aGifi++jvCrUaklKEKT0BU95igDNaqkvz+49u
 github.com/viant/toolbox v0.24.0/go.mod h1:OxMCG57V0PXuIP2HNQrtJf2CjqdmbrOx5EkMILuUhzM=
 github.com/vulcanize/go-ethereum v1.9.11-statediff-0.0.5 h1:U+BqhjRLR22e9OEm8cgWC3Eq3bh8G6azjNpXeenfCG4=
 github.com/vulcanize/go-ethereum v1.9.11-statediff-0.0.5/go.mod h1:7oC0Ni6dosMv5pxMigm6s0hN8g4haJMBnqmmo0D9YfQ=
+github.com/vulcanize/ipld-eth-indexer v0.2.0-alpha h1:+XVaC7TsA0K278YWpfqdrNxwgC6hY6fBaN8w2/e1Lts=
+github.com/vulcanize/ipld-eth-indexer v0.2.0-alpha/go.mod h1:SuMBscFfcBHYlQuzDDd4by+R0S3gaAFjrOU+uQfAefE=
 github.com/vulcanize/pg-ipfs-ethdb v0.0.1-alpha h1:Y7j0Hw1jgVVOg+eUGUr7OgH+gOBID0DwbsfZV1KoL7I=
 github.com/vulcanize/pg-ipfs-ethdb v0.0.1-alpha/go.mod h1:OuqE4r2LGWAtDVx3s1yaAzDcwy+LEAqrWaE1L8UfrGY=
 github.com/wangjia184/sortedset v0.0.0-20160527075905-f5d03557ba30/go.mod h1:YkocrP2K2tcw938x9gCOmT5G5eCD6jsTz0SZuyAqwIE=
@@ -977,6 +1001,7 @@ go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/
 go.uber.org/multierr v1.4.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4=
 go.uber.org/multierr v1.5.0 h1:KCa4XfM8CWFCpxXRGok+Q0SS/0XBhMDbHHGABQLvD2A=
 go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU=
+go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee h1:0mgffUl7nfd+FpvXMVz4IDEaUSmT1ysygQC7qYo7sG4=
 go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA=
 go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
 go.uber.org/zap v1.14.1 h1:nYDKopTbvAPq/NrUVZwT15y2lpROBiLLyoRTbXOYWOo=
@@ -1022,6 +1047,7 @@ golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHl
 golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
 golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
 golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f h1:J5lckAjkw6qYlOZNj90mLYNTEKDvWeuc1yieZ8qUzUE=
 golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
 golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
 golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
@@ -1150,6 +1176,7 @@ golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtn
 golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.0.0-20191114200427-caa0b0f7d508/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191216052735-49a3e744a425 h1:VvQyQJN0tSuecqgcIxMWnnfG5kSmgy9KZR9sW3W5QeA=
 golang.org/x/tools v0.0.0-20191216052735-49a3e744a425/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -1168,6 +1195,7 @@ google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7
 google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
 google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
 google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/appengine v1.6.1 h1:QzqyMA1tlu6CgqCDUtU9V+ZKhLFT2dkJuANu5QaxI3I=
 google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
 google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
 google.golang.org/genproto v0.0.0-20180831171423-11092d34479b/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
@@ -1199,6 +1227,7 @@ google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2
 gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
 gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/cheggaaa/pb.v1 v1.0.28/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw=
 gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
@@ -1206,6 +1235,7 @@ gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMy
 gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
 gopkg.in/ini.v1 v1.51.0 h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno=
 gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
+gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce h1:+JknDZhAj8YMt7GC73Ei8pv4MzjDUNPHgQWJdtMAaDU=
 gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce/go.mod h1:5AcXVHNjg+BDxry382+8OKon8SEWiKktQR07RKPsv1c=
 gopkg.in/olebedev/go-duktape.v3 v3.0.0-20190213234257-ec84240a7772/go.mod h1:uAJfkITjFhyEEuUfm7bsmCZRbW5WRq8s9EY8HZ6hCns=
 gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
@@ -1229,6 +1259,7 @@ honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWh
 honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
 honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
 honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM=
 honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
 rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
 rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=

From add6afadd76c9b29295226c97347cc567f60c9ed Mon Sep 17 00:00:00 2001
From: Ian Norden
Date: Wed, 2 Sep 2020 14:13:51 -0500
Subject: [PATCH 12/12] fix subscribe command and client (remove rlp encoding of subscription params)

---
 cmd/serve.go         | 6 +++---
 cmd/subscribe.go     | 6 +-----
 pkg/client/client.go | 6 ++++--
 3 files changed, 8 insertions(+), 10 deletions(-)

diff --git a/cmd/serve.go b/cmd/serve.go
index c3d6b4a1..caa937fc 100644
--- a/cmd/serve.go
+++ b/cmd/serve.go
@@ -77,17 +77,17 @@ func serve() {
 }
 
 func startServers(server s.Server, settings *s.Config) error {
-	logWithCommand.Debug("starting up IPC server")
+	logWithCommand.Info("starting up IPC server")
 	_, _, err := rpc.StartIPCEndpoint(settings.IPCEndpoint, server.APIs())
 	if err != nil {
 		return err
 	}
-	logWithCommand.Debug("starting up WS server")
+	logWithCommand.Info("starting up WS server")
 	_, _, err = rpc.StartWSEndpoint(settings.WSEndpoint, server.APIs(), []string{"vdb"}, nil, true)
 	if err != nil {
 		return err
 	}
-	logWithCommand.Debug("starting up HTTP server")
+	logWithCommand.Info("starting up HTTP server")
 	_, _, err = rpc.StartHTTPEndpoint(settings.HTTPEndpoint, server.APIs(), []string{"eth"}, nil, nil, rpc.HTTPTimeouts{})
 	return err
 }
diff --git a/cmd/subscribe.go b/cmd/subscribe.go
index abe5e93d..563ff4fa 100644
--- a/cmd/subscribe.go
+++ b/cmd/subscribe.go
@@ -70,11 +70,7 @@ func subscribe() {
 	payloadChan := make(chan w.SubscriptionPayload, 20000)
 
 	// Subscribe to the watcher service with the given config/filter parameters
-	rlpParams, err := rlp.EncodeToBytes(ethSubConfig)
-	if err != nil {
-		logWithCommand.Fatal(err)
-	}
-	sub, err := subClient.Stream(payloadChan, rlpParams)
+	sub, err := subClient.Stream(payloadChan, *ethSubConfig)
 	if err != nil {
 		logWithCommand.Fatal(err)
 	}
diff --git a/pkg/client/client.go b/pkg/client/client.go
index 3ffb296f..729c1185 100644
--- a/pkg/client/client.go
+++ b/pkg/client/client.go
@@ -20,6 +20,8 @@ package client
 import (
 	"context"
 
+	"github.com/vulcanize/ipld-eth-server/pkg/eth"
+
 	"github.com/ethereum/go-ethereum/rpc"
 
 	"github.com/vulcanize/ipld-eth-server/pkg/serve"
@@ -38,6 +40,6 @@ func NewClient(c *rpc.Client) *Client {
 }
 
 // Stream is the main loop for subscribing to iplds from an ipld-eth-server server
-func (c *Client) Stream(payloadChan chan serve.SubscriptionPayload, rlpParams []byte) (*rpc.ClientSubscription, error) {
-	return c.c.Subscribe(context.Background(), "vdb", payloadChan, "stream", rlpParams)
+func (c *Client) Stream(payloadChan chan serve.SubscriptionPayload, params eth.SubscriptionSettings) (*rpc.ClientSubscription, error) {
+	return c.c.Subscribe(context.Background(), "vdb", payloadChan, "stream", params)
 }