diff --git a/.gitignore b/.gitignore index 4cefb254..738c34f2 100644 --- a/.gitignore +++ b/.gitignore @@ -7,7 +7,7 @@ Vagrantfile vagrant*.sh .vagrant test_scripts/ -vulcanizedb +ipfs-chain-watcher postgraphile/build/ postgraphile/node_modules/ postgraphile/package-lock.json diff --git a/.travis.yml b/.travis.yml index ad1505f7..7eb88c79 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,7 +7,7 @@ services: addons: ssh_known_hosts: arch1.vdb.to postgresql: '11.2' -go_import_path: github.com/vulcanize/vulcanizedb +go_import_path: github.com/vulcanize/ipfs-chain-watcher before_install: - openssl aes-256-cbc -K $encrypted_e1db309e8776_key -iv $encrypted_e1db309e8776_iv -in temp_rsa.enc -out temp_rsa -d diff --git a/Dockerfile b/Dockerfile index 4c853c43..70a95a7f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,9 +2,9 @@ FROM golang:alpine as builder RUN apk --update --no-cache add make git g++ # Build statically linked vDB binary (wonky path because of Dep) -RUN mkdir -p /go/src/github.com/vulcanize/vulcanizedb -ADD . /go/src/github.com/vulcanize/vulcanizedb -WORKDIR /go/src/github.com/vulcanize/vulcanizedb +RUN mkdir -p /go/src/github.com/vulcanize/ipfs-chain-watcher +ADD . /go/src/github.com/vulcanize/ipfs-chain-watcher +WORKDIR /go/src/github.com/vulcanize/ipfs-chain-watcher RUN GCO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -ldflags '-extldflags "-static"' . 
# Build migration tool @@ -14,10 +14,10 @@ RUN GCO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -ldflags '-extldflag # Second stage FROM alpine -COPY --from=builder /go/src/github.com/vulcanize/vulcanizedb/vulcanizedb /app/vulcanizedb -COPY --from=builder /go/src/github.com/vulcanize/vulcanizedb/environments/staging.toml /app/environments/ -COPY --from=builder /go/src/github.com/vulcanize/vulcanizedb/dockerfiles/startup_script.sh /app/ -COPY --from=builder /go/src/github.com/vulcanize/vulcanizedb/db/migrations/* /app/ +COPY --from=builder /go/src/github.com/vulcanize/ipfs-chain-watcher/vulcanizedb /app/vulcanizedb +COPY --from=builder /go/src/github.com/vulcanize/ipfs-chain-watcher/environments/staging.toml /app/environments/ +COPY --from=builder /go/src/github.com/vulcanize/ipfs-chain-watcher/dockerfiles/startup_script.sh /app/ +COPY --from=builder /go/src/github.com/vulcanize/ipfs-chain-watcher/db/migrations/* /app/ COPY --from=builder /go/src/github.com/pressly/goose/cmd/goose/goose /app/goose WORKDIR /app diff --git a/README.md b/README.md index 179c1943..fe2c90b7 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # Vulcanize DB [![Build Status](https://travis-ci.org/vulcanize/vulcanizedb.svg?branch=master)](https://travis-ci.org/vulcanize/vulcanizedb) -[![Go Report Card](https://goreportcard.com/badge/github.com/vulcanize/vulcanizedb)](https://goreportcard.com/report/github.com/vulcanize/vulcanizedb) +[![Go Report Card](https://goreportcard.com/badge/github.com/vulcanize/ipfs-chain-watcher)](https://goreportcard.com/report/github.com/vulcanize/ipfs-chain-watcher) > Vulcanize DB is a set of tools that make it easier for developers to write application-specific indexes and caches for dapps built on Ethereum. @@ -44,11 +44,11 @@ data from VulcanizeDB's underlying Postgres database and making it accessible. 
### Building the project Download the codebase to your local `GOPATH` via: -`go get github.com/vulcanize/vulcanizedb` +`go get github.com/vulcanize/ipfs-chain-watcher` Move to the project directory: -`cd $GOPATH/src/github.com/vulcanize/vulcanizedb` +`cd $GOPATH/src/github.com/vulcanize/ipfs-chain-watcher` Be sure you have enabled Go Modules (`export GO111MODULE=on`), and build the executable with: @@ -65,7 +65,7 @@ It can be additionally helpful to add `$GOPATH/bin` to your shell's `$PATH`. 1. Install Postgres 1. Create a superuser for yourself and make sure `psql --list` works without prompting for a password. 1. `createdb vulcanize_public` -1. `cd $GOPATH/src/github.com/vulcanize/vulcanizedb` +1. `cd $GOPATH/src/github.com/vulcanize/ipfs-chain-watcher` 1. Run the migrations: `make migrate HOST_NAME=localhost NAME=vulcanize_public PORT=5432` - There is an optional var `USER=username` if the database user is not the default user `postgres` - To rollback a single step: `make rollback NAME=vulcanize_public` diff --git a/cmd/compose.go b/cmd/compose.go deleted file mode 100644 index d82a77c1..00000000 --- a/cmd/compose.go +++ /dev/null @@ -1,193 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package cmd - -import ( - "errors" - "fmt" - "strconv" - - log "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - "github.com/spf13/viper" - - "github.com/vulcanize/vulcanizedb/pkg/config" - p2 "github.com/vulcanize/vulcanizedb/pkg/plugin" -) - -// composeCmd represents the compose command -var composeCmd = &cobra.Command{ - Use: "compose", - Short: "Composes transformer initializer plugin", - Long: `This command needs a config .toml file of form: - -[database] - name = "vulcanize_public" - hostname = "localhost" - user = "vulcanize" - password = "vulcanize" - port = 5432 - -[client] - ipcPath = "/Users/user/Library/Ethereum/geth.ipc" - -[exporter] - home = "github.com/vulcanize/vulcanizedb" - name = "exampleTransformerExporter" - save = false - transformerNames = [ - "transformer1", - "transformer2", - "transformer3", - "transformer4", - ] - [exporter.transformer1] - path = "path/to/transformer1" - type = "eth_event" - repository = "github.com/account/repo" - migrations = "db/migrations" - rank = "0" - [exporter.transformer2] - path = "path/to/transformer2" - type = "eth_contract" - repository = "github.com/account/repo" - migrations = "db/migrations" - rank = "0" - [exporter.transformer3] - path = "path/to/transformer3" - type = "eth_event" - repository = "github.com/account/repo" - migrations = "db/migrations" - rank = "0" - [exporter.transformer4] - path = "path/to/transformer4" - type = "eth_storage" - repository = "github.com/account2/repo2" - migrations = "to/db/migrations" - rank = "1" - - -Note: If any of the plugin transformer need additional -configuration variables include them in the .toml file as well - -This information is used to write and build a go plugin with a transformer -set composed from the transformer imports specified in the config file -This plugin is loaded and the set of transformer initializers is exported -from it and loaded into and executed over by the appropriate watcher. 
- -The type of watcher that the transformer works with is specified using the -type variable for each transformer in the config. Currently there are watchers -of event data from an eth node (eth_event) and storage data from an eth node -(eth_storage), and a more generic interface for accepting contract_watcher pkg -based transformers which can perform both event watching and public method -polling (eth_contract). - -Transformers of different types can be ran together in the same command using a -single config file or in separate command instances using different config files - -Specify config location when executing the command: -./vulcanizedb compose --config=./environments/config_name.toml`, - Run: func(cmd *cobra.Command, args []string) { - subCommand = cmd.CalledAs() - logWithCommand = *log.WithField("SubCommand", subCommand) - compose() - }, -} - -func compose() { - // Build plugin generator config - prepConfig() - - // Generate code to build the plugin according to the config file - logWithCommand.Info("generating plugin") - generator, err := p2.NewGenerator(genConfig, databaseConfig) - if err != nil { - logWithCommand.Debug("initializing plugin generator failed") - logWithCommand.Fatal(err) - } - err = generator.GenerateExporterPlugin() - if err != nil { - logWithCommand.Debug("generating plugin failed") - logWithCommand.Fatal(err) - } - // TODO: Embed versioning info in the .so files so we know which version of vulcanizedb to run them with - _, pluginPath, err := genConfig.GetPluginPaths() - if err != nil { - logWithCommand.Debug("getting plugin path failed") - logWithCommand.Fatal(err) - } - fmt.Printf("Composed plugin %s", pluginPath) - logWithCommand.Info("plugin .so file output to ", pluginPath) -} - -func init() { - rootCmd.AddCommand(composeCmd) -} - -func prepConfig() { - logWithCommand.Info("configuring plugin") - names := viper.GetStringSlice("exporter.transformerNames") - transformers := make(map[string]config.Transformer) - for _, name := range 
names { - logWithCommand.Debug("Configuring " + name + " transformer") - transformer := viper.GetStringMapString("exporter." + name) - p, pOK := transformer["path"] - if !pOK || p == "" { - logWithCommand.Fatal(name, " transformer config is missing `path` value") - } - r, rOK := transformer["repository"] - if !rOK || r == "" { - logWithCommand.Fatal(name, " transformer config is missing `repository` value") - } - m, mOK := transformer["migrations"] - if !mOK || m == "" { - logWithCommand.Fatal(name, " transformer config is missing `migrations` value") - } - mr, mrOK := transformer["rank"] - if !mrOK || mr == "" { - logWithCommand.Fatal(name, " transformer config is missing `rank` value") - } - rank, err := strconv.ParseUint(mr, 10, 64) - if err != nil { - logWithCommand.Fatal(name, " migration `rank` can't be converted to an unsigned integer") - } - t, tOK := transformer["type"] - if !tOK { - logWithCommand.Fatal(name, " transformer config is missing `type` value") - } - transformerType := config.GetTransformerType(t) - if transformerType == config.UnknownTransformerType { - logWithCommand.Fatal(errors.New(`unknown transformer type in exporter config accepted types are "eth_event", "eth_storage"`)) - } - - transformers[name] = config.Transformer{ - Path: p, - Type: transformerType, - RepositoryPath: r, - MigrationPath: m, - MigrationRank: rank, - } - } - - genConfig = config.Plugin{ - Transformers: transformers, - FilePath: "$GOPATH/src/github.com/vulcanize/vulcanizedb/plugins", - FileName: viper.GetString("exporter.name"), - Save: viper.GetBool("exporter.save"), - Home: viper.GetString("exporter.home"), - } -} diff --git a/cmd/composeAndExecute.go b/cmd/composeAndExecute.go deleted file mode 100644 index 9b8219bd..00000000 --- a/cmd/composeAndExecute.go +++ /dev/null @@ -1,217 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public 
License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package cmd - -import ( - "os" - "plugin" - syn "sync" - "time" - - log "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - - "github.com/vulcanize/vulcanizedb/libraries/shared/fetcher" - "github.com/vulcanize/vulcanizedb/libraries/shared/streamer" - "github.com/vulcanize/vulcanizedb/libraries/shared/watcher" - "github.com/vulcanize/vulcanizedb/pkg/fs" - p2 "github.com/vulcanize/vulcanizedb/pkg/plugin" - "github.com/vulcanize/vulcanizedb/pkg/plugin/helpers" - "github.com/vulcanize/vulcanizedb/utils" -) - -// composeAndExecuteCmd represents the composeAndExecute command -var composeAndExecuteCmd = &cobra.Command{ - Use: "composeAndExecute", - Short: "Composes, loads, and executes transformer initializer plugin", - Long: `This command needs a config .toml file of form: - -[database] - name = "vulcanize_public" - hostname = "localhost" - user = "vulcanize" - password = "vulcanize" - port = 5432 - -[client] - ipcPath = "/Users/user/Library/Ethereum/geth.ipc" - -[exporter] - home = "github.com/vulcanize/vulcanizedb" - name = "exampleTransformerExporter" - save = false - transformerNames = [ - "transformer1", - "transformer2", - "transformer3", - "transformer4", - ] - [exporter.transformer1] - path = "path/to/transformer1" - type = "eth_event" - repository = "github.com/account/repo" - migrations = "db/migrations" - rank = "0" - [exporter.transformer2] - path = "path/to/transformer2" - type = "eth_contract" - repository = "github.com/account/repo" - 
migrations = "db/migrations" - rank = "2" - [exporter.transformer3] - path = "path/to/transformer3" - type = "eth_event" - repository = "github.com/account/repo" - migrations = "db/migrations" - rank = "0" - [exporter.transformer4] - path = "path/to/transformer4" - type = "eth_storage" - repository = "github.com/account2/repo2" - migrations = "to/db/migrations" - rank = "1" - - -Note: If any of the plugin transformer need additional -configuration variables include them in the .toml file as well - -This information is used to write and build a go plugin with a transformer -set composed from the transformer imports specified in the config file -This plugin is loaded and the set of transformer initializers is exported -from it and loaded into and executed over by the appropriate watcher. - -The type of watcher that the transformer works with is specified using the -type variable for each transformer in the config. Currently there are watchers -of event data from an eth node (eth_event) and storage data from an eth node -(eth_storage), and a more generic interface for accepting contract_watcher pkg -based transformers which can perform both event watching and public method -polling (eth_contract). 
- -Transformers of different types can be ran together in the same command using a -single config file or in separate command instances using different config files - -Specify config location when executing the command: -./vulcanizedb composeAndExecute --config=./environments/config_name.toml`, - Run: func(cmd *cobra.Command, args []string) { - subCommand = cmd.CalledAs() - logWithCommand = *log.WithField("SubCommand", subCommand) - composeAndExecute() - }, -} - -func composeAndExecute() { - // Build plugin generator config - prepConfig() - - // Generate code to build the plugin according to the config file - logWithCommand.Info("generating plugin") - generator, err := p2.NewGenerator(genConfig, databaseConfig) - if err != nil { - logWithCommand.Fatal(err) - } - err = generator.GenerateExporterPlugin() - if err != nil { - logWithCommand.Debug("generating plugin failed") - logWithCommand.Fatal(err) - } - - // Get the plugin path and load the plugin - _, pluginPath, err := genConfig.GetPluginPaths() - if err != nil { - logWithCommand.Fatal(err) - } - if !genConfig.Save { - defer helpers.ClearFiles(pluginPath) - } - logWithCommand.Info("linking plugin ", pluginPath) - plug, err := plugin.Open(pluginPath) - if err != nil { - logWithCommand.Debug("linking plugin failed") - logWithCommand.Fatal(err) - } - - // Load the `Exporter` symbol from the plugin - logWithCommand.Info("loading transformers from plugin") - symExporter, err := plug.Lookup("Exporter") - if err != nil { - logWithCommand.Debug("loading Exporter symbol failed") - logWithCommand.Fatal(err) - } - - // Assert that the symbol is of type Exporter - exporter, ok := symExporter.(Exporter) - if !ok { - logWithCommand.Debug("plugged-in symbol not of type Exporter") - os.Exit(1) - } - - // Use the Exporters export method to load the EventTransformerInitializer, StorageTransformerInitializer, and ContractTransformerInitializer sets - ethEventInitializers, ethStorageInitializers, ethContractInitializers := 
exporter.Export() - - // Setup bc and db objects - blockChain := getBlockChain() - db := utils.LoadPostgres(databaseConfig, blockChain.Node()) - - // Execute over transformer sets returned by the exporter - // Use WaitGroup to wait on both goroutines - var wg syn.WaitGroup - if len(ethEventInitializers) > 0 { - ew := watcher.NewEventWatcher(&db, blockChain) - err := ew.AddTransformers(ethEventInitializers) - if err != nil { - logWithCommand.Fatalf("failed to add event transformer initializers to watcher: %s", err.Error()) - } - wg.Add(1) - go watchEthEvents(&ew, &wg) - } - - if len(ethStorageInitializers) > 0 { - switch storageDiffsSource { - case "geth": - log.Debug("fetching storage diffs from geth pub sub") - rpcClient, _ := getClients() - stateDiffStreamer := streamer.NewStateDiffStreamer(rpcClient) - storageFetcher := fetcher.NewGethRPCStorageFetcher(stateDiffStreamer) - sw := watcher.NewStorageWatcher(storageFetcher, &db) - sw.AddTransformers(ethStorageInitializers) - wg.Add(1) - go watchEthStorage(sw, &wg) - default: - log.Debug("fetching storage diffs from csv") - tailer := fs.FileTailer{Path: storageDiffsPath} - storageFetcher := fetcher.NewCsvTailStorageFetcher(tailer) - sw := watcher.NewStorageWatcher(storageFetcher, &db) - sw.AddTransformers(ethStorageInitializers) - wg.Add(1) - go watchEthStorage(sw, &wg) - } - } - - if len(ethContractInitializers) > 0 { - gw := watcher.NewContractWatcher(&db, blockChain) - gw.AddTransformers(ethContractInitializers) - wg.Add(1) - go watchEthContract(&gw, &wg) - } - wg.Wait() -} - -func init() { - rootCmd.AddCommand(composeAndExecuteCmd) - composeAndExecuteCmd.Flags().BoolVarP(&recheckHeadersArg, "recheck-headers", "r", false, "whether to re-check headers for watched events") - composeAndExecuteCmd.Flags().DurationVarP(&queueRecheckInterval, "queue-recheck-interval", "q", 5*time.Minute, "interval duration for rechecking queued storage diffs (ex: 5m30s)") -} diff --git a/cmd/contractWatcher.go b/cmd/contractWatcher.go 
deleted file mode 100644 index dd7759a0..00000000 --- a/cmd/contractWatcher.go +++ /dev/null @@ -1,118 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package cmd - -import ( - "fmt" - "time" - - log "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - - st "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/pkg/config" - ht "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/header/transformer" - "github.com/vulcanize/vulcanizedb/utils" -) - -// contractWatcherCmd represents the contractWatcher command -var contractWatcherCmd = &cobra.Command{ - Use: "contractWatcher", - Short: "Watches events at the provided contract address using fully synced vDB", - Long: `Uses input contract address and event filters to watch events - -Expects an ethereum node to be running -Expects an archival node synced into vulcanizeDB -Requires a .toml config file: - - [database] - name = "vulcanize_public" - hostname = "localhost" - port = 5432 - - [client] - ipcPath = "/Users/user/Library/Ethereum/geth.ipc" - - [contract] - network = "" - addresses = [ - "contractAddress1", - "contractAddress2" - ] - [contract.contractAddress1] - abi = 'ABI for contract 1' - startingBlock = 982463 - [contract.contractAddress2] - abi = 'ABI for contract 2' - events = [ - "event1", - "event2" - ] - 
eventArgs = [ - "arg1", - "arg2" - ] - methods = [ - "method1", - "method2" - ] - methodArgs = [ - "arg1", - "arg2" - ] - startingBlock = 4448566 - piping = true -`, - Run: func(cmd *cobra.Command, args []string) { - subCommand = cmd.CalledAs() - logWithCommand = *log.WithField("SubCommand", subCommand) - contractWatcher() - }, -} - -var ( - mode string -) - -func contractWatcher() { - ticker := time.NewTicker(5 * time.Second) - defer ticker.Stop() - - blockChain := getBlockChain() - db := utils.LoadPostgres(databaseConfig, blockChain.Node()) - - var t st.ContractTransformer - con := config.ContractConfig{} - con.PrepConfig() - t = ht.NewTransformer(con, blockChain, &db) - - err := t.Init() - if err != nil { - logWithCommand.Fatal(fmt.Sprintf("Failed to initialize transformer, err: %v ", err)) - } - - for range ticker.C { - err = t.Execute() - if err != nil { - logWithCommand.Error("Execution error for transformer: ", t.GetConfig().Name, err) - } - } -} - -func init() { - rootCmd.AddCommand(contractWatcherCmd) -} diff --git a/cmd/execute.go b/cmd/execute.go deleted file mode 100644 index 53e87938..00000000 --- a/cmd/execute.go +++ /dev/null @@ -1,207 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package cmd - -import ( - "fmt" - "plugin" - syn "sync" - "time" - - log "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - "github.com/spf13/viper" - - "github.com/vulcanize/vulcanizedb/libraries/shared/constants" - "github.com/vulcanize/vulcanizedb/libraries/shared/fetcher" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage" - "github.com/vulcanize/vulcanizedb/libraries/shared/streamer" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/libraries/shared/watcher" - "github.com/vulcanize/vulcanizedb/pkg/fs" - "github.com/vulcanize/vulcanizedb/utils" -) - -// executeCmd represents the execute command -var executeCmd = &cobra.Command{ - Use: "execute", - Short: "executes a precomposed transformer initializer plugin", - Long: `This command needs a config .toml file of form: -[database] - name = "vulcanize_public" - hostname = "localhost" - user = "vulcanize" - password = "vulcanize" - port = 5432 -[client] - ipcPath = "/Users/user/Library/Ethereum/geth.ipc" -[exporter] - name = "exampleTransformerExporter" -Note: If any of the plugin transformer need additional -configuration variables include them in the .toml file as well -The exporter.name is the name (without extension) of the plugin to be loaded. -The plugin file needs to be located in the /plugins directory and this command assumes -the db migrations remain from when the plugin was composed. Additionally, the plugin -must have been composed by the same version of vulcanizedb or else it will not be compatible. 
-Specify config location when executing the command: -./vulcanizedb execute --config=./environments/config_name.toml`, - Run: func(cmd *cobra.Command, args []string) { - subCommand = cmd.CalledAs() - logWithCommand = *log.WithField("SubCommand", subCommand) - execute() - }, -} - -func execute() { - // Build plugin generator config - prepConfig() - - // Get the plugin path and load the plugin - _, pluginPath, err := genConfig.GetPluginPaths() - if err != nil { - logWithCommand.Fatal(err) - } - - fmt.Printf("Executing plugin %s", pluginPath) - logWithCommand.Info("linking plugin ", pluginPath) - plug, err := plugin.Open(pluginPath) - if err != nil { - logWithCommand.Warn("linking plugin failed") - logWithCommand.Fatal(err) - } - - // Load the `Exporter` symbol from the plugin - logWithCommand.Info("loading transformers from plugin") - symExporter, err := plug.Lookup("Exporter") - if err != nil { - logWithCommand.Warn("loading Exporter symbol failed") - logWithCommand.Fatal(err) - } - - // Assert that the symbol is of type Exporter - exporter, ok := symExporter.(Exporter) - if !ok { - logWithCommand.Fatal("plugged-in symbol not of type Exporter") - } - - // Use the Exporters export method to load the EventTransformerInitializer, StorageTransformerInitializer, and ContractTransformerInitializer sets - ethEventInitializers, ethStorageInitializers, ethContractInitializers := exporter.Export() - - // Setup bc and db objects - blockChain := getBlockChain() - db := utils.LoadPostgres(databaseConfig, blockChain.Node()) - - // Execute over transformer sets returned by the exporter - // Use WaitGroup to wait on both goroutines - var wg syn.WaitGroup - if len(ethEventInitializers) > 0 { - ew := watcher.NewEventWatcher(&db, blockChain) - err = ew.AddTransformers(ethEventInitializers) - if err != nil { - logWithCommand.Fatalf("failed to add event transformer initializers to watcher: %s", err.Error()) - } - wg.Add(1) - go watchEthEvents(&ew, &wg) - } - - if 
len(ethStorageInitializers) > 0 { - switch storageDiffsSource { - case "geth": - log.Debug("fetching storage diffs from geth pub sub") - wsClient := getWSClient() - stateDiffStreamer := streamer.NewStateDiffStreamer(wsClient) - storageFetcher := fetcher.NewGethRPCStorageFetcher(stateDiffStreamer) - sw := watcher.NewStorageWatcher(storageFetcher, &db) - sw.AddTransformers(ethStorageInitializers) - wg.Add(1) - go watchEthStorage(sw, &wg) - default: - log.Debug("fetching storage diffs from csv") - tailer := fs.FileTailer{Path: storageDiffsPath} - storageFetcher := fetcher.NewCsvTailStorageFetcher(tailer) - sw := watcher.NewStorageWatcher(storageFetcher, &db) - sw.AddTransformers(ethStorageInitializers) - wg.Add(1) - go watchEthStorage(sw, &wg) - } - } - - if len(ethContractInitializers) > 0 { - gw := watcher.NewContractWatcher(&db, blockChain) - gw.AddTransformers(ethContractInitializers) - wg.Add(1) - go watchEthContract(&gw, &wg) - } - wg.Wait() -} - -func init() { - rootCmd.AddCommand(executeCmd) - executeCmd.Flags().BoolVarP(&recheckHeadersArg, "recheck-headers", "r", false, "whether to re-check headers for watched events") - executeCmd.Flags().DurationVarP(&queueRecheckInterval, "queue-recheck-interval", "q", 5*time.Minute, "interval duration for rechecking queued storage diffs (ex: 5m30s)") -} - -type Exporter interface { - Export() ([]transformer.EventTransformerInitializer, []transformer.StorageTransformerInitializer, []transformer.ContractTransformerInitializer) -} - -func watchEthEvents(w *watcher.EventWatcher, wg *syn.WaitGroup) { - defer wg.Done() - // Execute over the EventTransformerInitializer set using the watcher - logWithCommand.Info("executing event transformers") - var recheck constants.TransformerExecution - if recheckHeadersArg { - recheck = constants.HeaderRecheck - } else { - recheck = constants.HeaderUnchecked - } - err := w.Execute(recheck) - if err != nil { - logWithCommand.Fatalf("error executing event watcher: %s", err.Error()) - } -} - 
-func watchEthStorage(w watcher.IStorageWatcher, wg *syn.WaitGroup) { - defer wg.Done() - // Execute over the StorageTransformerInitializer set using the storage watcher - logWithCommand.Info("executing storage transformers") - on := viper.GetBool("storageBackFill.on") - if on { - backFillStorage(w) - } - w.Execute(queueRecheckInterval, on) -} - -func backFillStorage(w watcher.IStorageWatcher) { - rpcClient, _ := getClients() - // find min deployment block - minDeploymentBlock := constants.GetMinDeploymentBlock() - stateDiffFetcher := fetcher.NewStateDiffFetcher(rpcClient) - backFiller := storage.NewStorageBackFiller(stateDiffFetcher, storage.DefaultMaxBatchSize) - go w.BackFill(minDeploymentBlock, backFiller) -} - -func watchEthContract(w *watcher.ContractWatcher, wg *syn.WaitGroup) { - defer wg.Done() - // Execute over the ContractTransformerInitializer set using the contract watcher - logWithCommand.Info("executing contract_watcher transformers") - ticker := time.NewTicker(pollingInterval) - defer ticker.Stop() - for range ticker.C { - w.Execute() - } -} diff --git a/cmd/headerSync.go b/cmd/headerSync.go deleted file mode 100644 index e08e68d0..00000000 --- a/cmd/headerSync.go +++ /dev/null @@ -1,114 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package cmd - -import ( - "time" - - log "github.com/sirupsen/logrus" - "github.com/spf13/cobra" - - "github.com/vulcanize/vulcanizedb/pkg/eth" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories" - "github.com/vulcanize/vulcanizedb/pkg/eth/history" - "github.com/vulcanize/vulcanizedb/utils" -) - -// headerSyncCmd represents the headerSync command -var headerSyncCmd = &cobra.Command{ - Use: "headerSync", - Short: "Syncs VulcanizeDB with local ethereum node's block headers", - Long: `Syncs VulcanizeDB with local ethereum node. Populates -Postgres with block headers. - -./vulcanizedb headerSync --starting-block-number 0 --config public.toml - -Expects ethereum node to be running and requires a .toml config: - - [database] - name = "vulcanize_public" - hostname = "localhost" - port = 5432 - - [client] - ipcPath = "/Users/user/Library/Ethereum/geth.ipc" -`, - Run: func(cmd *cobra.Command, args []string) { - subCommand = cmd.CalledAs() - logWithCommand = *log.WithField("SubCommand", subCommand) - headerSync() - }, -} - -func init() { - rootCmd.AddCommand(headerSyncCmd) - headerSyncCmd.Flags().Int64VarP(&startingBlockNumber, "starting-block-number", "s", 0, "Block number to start syncing from") -} - -func backFillAllHeaders(blockchain core.BlockChain, headerRepository datastore.HeaderRepository, missingBlocksPopulated chan int, startingBlockNumber int64) { - populated, err := history.PopulateMissingHeaders(blockchain, headerRepository, startingBlockNumber) - if err != nil { - // TODO Lots of possible errors in the call stack above. 
If errors occur, we still put - // 0 in the channel, triggering another round - logWithCommand.Error("backfillAllHeaders: Error populating headers: ", err) - } - missingBlocksPopulated <- populated -} - -func headerSync() { - ticker := time.NewTicker(pollingInterval) - defer ticker.Stop() - blockChain := getBlockChain() - validateArgs(blockChain) - db := utils.LoadPostgres(databaseConfig, blockChain.Node()) - - headerRepository := repositories.NewHeaderRepository(&db) - validator := history.NewHeaderValidator(blockChain, headerRepository, validationWindow) - missingBlocksPopulated := make(chan int) - go backFillAllHeaders(blockChain, headerRepository, missingBlocksPopulated, startingBlockNumber) - - for { - select { - case <-ticker.C: - window, err := validator.ValidateHeaders() - if err != nil { - logWithCommand.Error("headerSync: ValidateHeaders failed: ", err) - } - logWithCommand.Debug(window.GetString()) - case n := <-missingBlocksPopulated: - if n == 0 { - time.Sleep(3 * time.Second) - } - go backFillAllHeaders(blockChain, headerRepository, missingBlocksPopulated, startingBlockNumber) - } - } -} - -func validateArgs(blockChain *eth.BlockChain) { - lastBlock, err := blockChain.LastBlock() - if err != nil { - logWithCommand.Error("validateArgs: Error getting last block: ", err) - } - if lastBlock.Int64() == 0 { - logWithCommand.Fatal("geth initial: state sync not finished") - } - if startingBlockNumber > lastBlock.Int64() { - logWithCommand.Fatal("starting block number > current block number") - } -} diff --git a/cmd/resync.go b/cmd/resync.go index a2f6cf65..1169b7b4 100644 --- a/cmd/resync.go +++ b/cmd/resync.go @@ -19,11 +19,11 @@ import ( log "github.com/sirupsen/logrus" "github.com/spf13/cobra" "github.com/spf13/viper" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/super_node/resync" - v 
"github.com/vulcanize/vulcanizedb/version" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/resync" + v "github.com/vulcanize/ipfs-chain-watcher/version" ) // resyncCmd represents the resync command diff --git a/cmd/root.go b/cmd/root.go index 054e4284..054e3073 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -17,38 +17,24 @@ package cmd import ( - "errors" "fmt" "os" "strings" "time" - "github.com/ethereum/go-ethereum/ethclient" - "github.com/ethereum/go-ethereum/rpc" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" "github.com/spf13/viper" - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/eth" - "github.com/vulcanize/vulcanizedb/pkg/eth/client" - vRpc "github.com/vulcanize/vulcanizedb/pkg/eth/converters/rpc" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/node" + "github.com/vulcanize/ipfs-chain-watcher/pkg/config" ) var ( - cfgFile string - databaseConfig config.Database - genConfig config.Plugin - ipc string - queueRecheckInterval time.Duration - startingBlockNumber int64 - storageDiffsPath string - recheckHeadersArg bool - subCommand string - logWithCommand log.Entry - storageDiffsSource string + cfgFile string + databaseConfig config.Database + ipc string + subCommand string + logWithCommand log.Entry ) const ( @@ -91,8 +77,6 @@ func initFuncs(cmd *cobra.Command, args []string) { func setViperConfigs() { ipc = viper.GetString("client.ipcpath") - storageDiffsPath = viper.GetString("filesystem.storageDiffsPath") - storageDiffsSource = viper.GetString("storageDiffs.source") databaseConfig = config.Database{ Name: viper.GetString("database.name"), Hostname: viper.GetString("database.hostname"), @@ -130,10 +114,6 @@ func init() { rootCmd.PersistentFlags().String("database-user", "", "database user") rootCmd.PersistentFlags().String("database-password", "", "database password") 
rootCmd.PersistentFlags().String("client-ipcPath", "", "location of geth.ipc file") - rootCmd.PersistentFlags().String("client-levelDbPath", "", "location of levelDb chaindata") - rootCmd.PersistentFlags().String("filesystem-storageDiffsPath", "", "location of storage diffs csv file") - rootCmd.PersistentFlags().String("storageDiffs-source", "csv", "where to get the state diffs: csv or geth") - rootCmd.PersistentFlags().String("exporter-name", "exporter", "name of exporter plugin") rootCmd.PersistentFlags().String("log-level", log.InfoLevel.String(), "Log level (trace, debug, info, warn, error, fatal, panic") viper.BindPFlag("logfile", rootCmd.PersistentFlags().Lookup("logfile")) @@ -143,10 +123,6 @@ func init() { viper.BindPFlag("database.user", rootCmd.PersistentFlags().Lookup("database-user")) viper.BindPFlag("database.password", rootCmd.PersistentFlags().Lookup("database-password")) viper.BindPFlag("client.ipcPath", rootCmd.PersistentFlags().Lookup("client-ipcPath")) - viper.BindPFlag("client.levelDbPath", rootCmd.PersistentFlags().Lookup("client-levelDbPath")) - viper.BindPFlag("filesystem.storageDiffsPath", rootCmd.PersistentFlags().Lookup("filesystem-storageDiffsPath")) - viper.BindPFlag("storageDiffs.source", rootCmd.PersistentFlags().Lookup("storageDiffs-source")) - viper.BindPFlag("exporter.fileName", rootCmd.PersistentFlags().Lookup("exporter-name")) viper.BindPFlag("log.level", rootCmd.PersistentFlags().Lookup("log-level")) } @@ -162,35 +138,3 @@ func initConfig() { log.Warn("No config file passed with --config flag") } } - -func getBlockChain() *eth.BlockChain { - rpcClient, ethClient := getClients() - vdbEthClient := client.NewEthClient(ethClient) - vdbNode := node.MakeNode(rpcClient) - transactionConverter := vRpc.NewRPCTransactionConverter(ethClient) - return eth.NewBlockChain(vdbEthClient, rpcClient, vdbNode, transactionConverter) -} - -func getClients() (client.RPCClient, *ethclient.Client) { - rawRPCClient, err := rpc.Dial(ipc) - - if err != nil 
{ - logWithCommand.Fatal(err) - } - rpcClient := client.NewRPCClient(rawRPCClient, ipc) - ethClient := ethclient.NewClient(rawRPCClient) - - return rpcClient, ethClient -} - -func getWSClient() core.RPCClient { - wsRPCpath := viper.GetString("client.wsPath") - if wsRPCpath == "" { - logWithCommand.Fatal(errors.New("getWSClient() was called but no ws rpc path is provided")) - } - wsRPCClient, dialErr := rpc.Dial(wsRPCpath) - if dialErr != nil { - logWithCommand.Fatal(dialErr) - } - return client.NewRPCClient(wsRPCClient, wsRPCpath) -} diff --git a/cmd/streamEthSubscribe.go b/cmd/streamEthSubscribe.go index 17f85767..30d581ec 100644 --- a/cmd/streamEthSubscribe.go +++ b/cmd/streamEthSubscribe.go @@ -28,11 +28,11 @@ import ( "github.com/spf13/cobra" "github.com/spf13/viper" - "github.com/vulcanize/vulcanizedb/libraries/shared/streamer" - "github.com/vulcanize/vulcanizedb/pkg/eth/client" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/super_node" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/client" + "github.com/vulcanize/ipfs-chain-watcher/pkg/core" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/streamer" + "github.com/vulcanize/ipfs-chain-watcher/pkg/watcher" ) // streamEthSubscriptionCmd represents the streamEthSubscription command @@ -64,7 +64,7 @@ func streamEthSubscription() { str := streamer.NewSuperNodeStreamer(rpcClient) // Buffered channel for reading subscription payloads - payloadChan := make(chan super_node.SubscriptionPayload, 20000) + payloadChan := make(chan watcher.SubscriptionPayload, 20000) // Subscribe to the super node service with the given config/filter parameters rlpParams, err := rlp.EncodeToBytes(ethSubConfig) diff --git a/cmd/version.go b/cmd/version.go index cc35f32e..ea1175e0 100644 --- a/cmd/version.go +++ b/cmd/version.go @@ -19,20 +19,20 @@ import ( log "github.com/sirupsen/logrus" 
"github.com/spf13/cobra" - v "github.com/vulcanize/vulcanizedb/version" + v "github.com/vulcanize/ipfs-chain-watcher/version" ) // versionCmd represents the version command var versionCmd = &cobra.Command{ Use: "version", - Short: "Prints the version of vulcanizeDB", - Long: `Use this command to fetch the version of vulcanizeDB + Short: "Prints the version of ipfs-chain-watcher", + Long: `Use this command to fetch the version of ipfs-chain-watcher -Usage: ./vulcanizedb version`, +Usage: ./ipfs-chain-watcher version`, Run: func(cmd *cobra.Command, args []string) { subCommand = cmd.CalledAs() logWithCommand = *log.WithField("SubCommand", subCommand) - logWithCommand.Infof("VulcanizeDB version: %s", v.VersionWithMeta) + logWithCommand.Infof("ipfs-chain-watcher version: %s", v.VersionWithMeta) }, } diff --git a/cmd/superNode.go b/cmd/watch.go similarity index 92% rename from cmd/superNode.go rename to cmd/watch.go index a72dd6a9..db85ddce 100644 --- a/cmd/superNode.go +++ b/cmd/watch.go @@ -25,17 +25,17 @@ import ( "github.com/spf13/cobra" "github.com/spf13/viper" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/super_node" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" - v "github.com/vulcanize/vulcanizedb/version" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/watcher" + v "github.com/vulcanize/ipfs-chain-watcher/version" ) // superNodeCmd represents the superNode command var superNodeCmd = &cobra.Command{ Use: "superNode", - Short: "VulcanizeDB SuperNode", - Long: `This command configures a VulcanizeDB SuperNode. + Short: "sync chain data into PG-IPFS", + Long: `This command configures a VulcanizeDB ipfs-chain-watcher. The Sync process streams all chain data from the appropriate chain, processes this data into IPLD objects and publishes them to IPFS. 
It then indexes the CIDs against useful data fields/metadata in Postgres. @@ -56,7 +56,7 @@ and fill in gaps in the data func superNode() { logWithCommand.Infof("running vdb version: %s", v.VersionWithMeta) logWithCommand.Debug("loading super node configuration variables") - superNodeConfig, err := super_node.NewSuperNodeConfig() + superNodeConfig, err := watcher.NewSuperNodeConfig() if err != nil { logWithCommand.Fatal(err) } @@ -68,14 +68,14 @@ func superNode() { } wg := &sync.WaitGroup{} logWithCommand.Debug("initializing new super node service") - superNode, err := super_node.NewSuperNode(superNodeConfig) + superNode, err := watcher.NewSuperNode(superNodeConfig) if err != nil { logWithCommand.Fatal(err) } var forwardPayloadChan chan shared.ConvertedData if superNodeConfig.Serve { logWithCommand.Info("starting up super node servers") - forwardPayloadChan = make(chan shared.ConvertedData, super_node.PayloadChanBufferSize) + forwardPayloadChan = make(chan shared.ConvertedData, watcher.PayloadChanBufferSize) superNode.Serve(wg, forwardPayloadChan) if err := startServers(superNode, superNodeConfig); err != nil { logWithCommand.Fatal(err) @@ -87,10 +87,10 @@ func superNode() { logWithCommand.Fatal(err) } } - var backFiller super_node.BackFillInterface + var backFiller watcher.BackFillInterface if superNodeConfig.BackFill { logWithCommand.Debug("initializing new super node backfill service") - backFiller, err = super_node.NewBackFillService(superNodeConfig, forwardPayloadChan) + backFiller, err = watcher.NewBackFillService(superNodeConfig, forwardPayloadChan) if err != nil { logWithCommand.Fatal(err) } @@ -107,7 +107,7 @@ func superNode() { wg.Wait() } -func startServers(superNode super_node.SuperNode, settings *super_node.Config) error { +func startServers(superNode watcher.SuperNode, settings *watcher.Config) error { logWithCommand.Debug("starting up IPC server") _, _, err := rpc.StartIPCEndpoint(settings.IPCEndpoint, superNode.APIs()) if err != nil { diff --git 
a/db/migrations/00002_create_addresses_table.sql b/db/migrations/00002_create_addresses_table.sql deleted file mode 100644 index 6866f6e1..00000000 --- a/db/migrations/00002_create_addresses_table.sql +++ /dev/null @@ -1,11 +0,0 @@ --- +goose Up -CREATE TABLE public.addresses -( - id SERIAL PRIMARY KEY, - address character varying(42), - hashed_address character varying(66), - UNIQUE (address) -); - --- +goose Down -DROP TABLE public.addresses; \ No newline at end of file diff --git a/db/migrations/00011_create_eth_schema.sql b/db/migrations/00002_create_eth_schema.sql similarity index 100% rename from db/migrations/00011_create_eth_schema.sql rename to db/migrations/00002_create_eth_schema.sql diff --git a/db/migrations/00012_create_eth_header_cids_table.sql b/db/migrations/00003_create_eth_header_cids_table.sql similarity index 100% rename from db/migrations/00012_create_eth_header_cids_table.sql rename to db/migrations/00003_create_eth_header_cids_table.sql diff --git a/db/migrations/00003_create_headers_table.sql b/db/migrations/00003_create_headers_table.sql deleted file mode 100644 index 50616795..00000000 --- a/db/migrations/00003_create_headers_table.sql +++ /dev/null @@ -1,25 +0,0 @@ --- +goose Up -CREATE TABLE public.headers -( - id SERIAL PRIMARY KEY, - hash VARCHAR(66), - block_number BIGINT, - raw JSONB, - block_timestamp NUMERIC, - check_count INTEGER NOT NULL DEFAULT 0, - node_id INTEGER NOT NULL REFERENCES nodes (id) ON DELETE CASCADE, - eth_node_fingerprint VARCHAR(128), - UNIQUE (block_number, hash, eth_node_fingerprint) -); - -CREATE INDEX headers_block_number - ON public.headers (block_number); - -CREATE INDEX headers_block_timestamp - ON public.headers (block_timestamp); - --- +goose Down -DROP INDEX public.headers_block_number; -DROP INDEX public.headers_block_timestamp; - -DROP TABLE public.headers; diff --git a/db/migrations/00004_create_checked_headers_table.sql b/db/migrations/00004_create_checked_headers_table.sql deleted file mode 100644 
index acf0fbdb..00000000 --- a/db/migrations/00004_create_checked_headers_table.sql +++ /dev/null @@ -1,8 +0,0 @@ --- +goose Up -CREATE TABLE public.checked_headers ( - id SERIAL PRIMARY KEY, - header_id INTEGER UNIQUE NOT NULL REFERENCES headers (id) ON DELETE CASCADE -); - --- +goose Down -DROP TABLE public.checked_headers; diff --git a/db/migrations/00013_create_eth_uncle_cids_table.sql b/db/migrations/00004_create_eth_uncle_cids_table.sql similarity index 100% rename from db/migrations/00013_create_eth_uncle_cids_table.sql rename to db/migrations/00004_create_eth_uncle_cids_table.sql diff --git a/db/migrations/00014_create_eth_transaction_cids_table.sql b/db/migrations/00005_create_eth_transaction_cids_table.sql similarity index 100% rename from db/migrations/00014_create_eth_transaction_cids_table.sql rename to db/migrations/00005_create_eth_transaction_cids_table.sql diff --git a/db/migrations/00005_create_storage_diffs_table.sql b/db/migrations/00005_create_storage_diffs_table.sql deleted file mode 100644 index 18deef11..00000000 --- a/db/migrations/00005_create_storage_diffs_table.sql +++ /dev/null @@ -1,14 +0,0 @@ --- +goose Up -CREATE TABLE public.storage_diff -( - id SERIAL PRIMARY KEY, - block_height BIGINT, - block_hash BYTEA, - hashed_address BYTEA, - storage_key BYTEA, - storage_value BYTEA, - UNIQUE (block_height, block_hash, hashed_address, storage_key, storage_value) -); - --- +goose Down -DROP TABLE public.storage_diff; \ No newline at end of file diff --git a/db/migrations/00015_create_eth_receipt_cids_table.sql b/db/migrations/00006_create_eth_receipt_cids_table.sql similarity index 100% rename from db/migrations/00015_create_eth_receipt_cids_table.sql rename to db/migrations/00006_create_eth_receipt_cids_table.sql diff --git a/db/migrations/00006_create_queued_storage_diffs_table.sql b/db/migrations/00006_create_queued_storage_diffs_table.sql deleted file mode 100644 index b184d641..00000000 --- 
a/db/migrations/00006_create_queued_storage_diffs_table.sql +++ /dev/null @@ -1,9 +0,0 @@ --- +goose Up -CREATE TABLE public.queued_storage -( - id SERIAL PRIMARY KEY, - diff_id BIGINT UNIQUE NOT NULL REFERENCES public.storage_diff (id) -); - --- +goose Down -DROP TABLE public.queued_storage; diff --git a/db/migrations/00016_create_eth_state_cids_table.sql b/db/migrations/00007_create_eth_state_cids_table.sql similarity index 100% rename from db/migrations/00016_create_eth_state_cids_table.sql rename to db/migrations/00007_create_eth_state_cids_table.sql diff --git a/db/migrations/00007_create_header_sync_transactions_table.sql b/db/migrations/00007_create_header_sync_transactions_table.sql deleted file mode 100644 index 253ad464..00000000 --- a/db/migrations/00007_create_header_sync_transactions_table.sql +++ /dev/null @@ -1,29 +0,0 @@ --- +goose Up -CREATE TABLE public.header_sync_transactions -( - id SERIAL PRIMARY KEY, - header_id INTEGER NOT NULL REFERENCES headers (id) ON DELETE CASCADE, - hash VARCHAR(66), - gas_limit NUMERIC, - gas_price NUMERIC, - input_data BYTEA, - nonce NUMERIC, - raw BYTEA, - tx_from VARCHAR(44), - tx_index INTEGER, - tx_to VARCHAR(44), - "value" NUMERIC, - UNIQUE (header_id, hash) -); - -CREATE INDEX header_sync_transactions_header - ON public.header_sync_transactions (header_id); - -CREATE INDEX header_sync_transactions_tx_index - ON public.header_sync_transactions (tx_index); - --- +goose Down -DROP INDEX public.header_sync_transactions_header; -DROP INDEX public.header_sync_transactions_tx_index; - -DROP TABLE header_sync_transactions; diff --git a/db/migrations/00017_create_eth_storage_cids_table.sql b/db/migrations/00008_create_eth_storage_cids_table.sql similarity index 100% rename from db/migrations/00017_create_eth_storage_cids_table.sql rename to db/migrations/00008_create_eth_storage_cids_table.sql diff --git a/db/migrations/00008_create_header_sync_receipts_table.sql 
b/db/migrations/00008_create_header_sync_receipts_table.sql deleted file mode 100644 index eac9e145..00000000 --- a/db/migrations/00008_create_header_sync_receipts_table.sql +++ /dev/null @@ -1,27 +0,0 @@ --- +goose Up -CREATE TABLE header_sync_receipts -( - id SERIAL PRIMARY KEY, - transaction_id INTEGER NOT NULL REFERENCES header_sync_transactions (id) ON DELETE CASCADE, - header_id INTEGER NOT NULL REFERENCES headers (id) ON DELETE CASCADE, - contract_address_id INTEGER NOT NULL REFERENCES addresses (id) ON DELETE CASCADE, - cumulative_gas_used NUMERIC, - gas_used NUMERIC, - state_root VARCHAR(66), - status INTEGER, - tx_hash VARCHAR(66), - rlp BYTEA, - UNIQUE (header_id, transaction_id) -); - -CREATE INDEX header_sync_receipts_header - ON public.header_sync_receipts (header_id); - -CREATE INDEX header_sync_receipts_transaction - ON public.header_sync_receipts (transaction_id); - --- +goose Down -DROP INDEX public.header_sync_receipts_header; -DROP INDEX public.header_sync_receipts_transaction; - -DROP TABLE header_sync_receipts; diff --git a/db/migrations/00009_create_header_sync_logs_table.sql b/db/migrations/00009_create_header_sync_logs_table.sql deleted file mode 100644 index 4a6ec73b..00000000 --- a/db/migrations/00009_create_header_sync_logs_table.sql +++ /dev/null @@ -1,22 +0,0 @@ --- +goose Up --- SQL in this section is executed when the migration is applied. -CREATE TABLE header_sync_logs -( - id SERIAL PRIMARY KEY, - header_id INTEGER NOT NULL REFERENCES headers (id) ON DELETE CASCADE, - address INTEGER NOT NULL REFERENCES addresses (id) ON DELETE CASCADE, - topics BYTEA[], - data BYTEA, - block_number BIGINT, - block_hash VARCHAR(66), - tx_hash VARCHAR(66), - tx_index INTEGER, - log_index INTEGER, - raw JSONB, - transformed BOOL NOT NULL DEFAULT FALSE, - UNIQUE (header_id, tx_index, log_index) -); - --- +goose Down --- SQL in this section is executed when the migration is rolled back. 
-DROP TABLE header_sync_logs; \ No newline at end of file diff --git a/db/migrations/00018_create_ipfs_blocks_table.sql b/db/migrations/00009_create_ipfs_blocks_table.sql similarity index 100% rename from db/migrations/00018_create_ipfs_blocks_table.sql rename to db/migrations/00009_create_ipfs_blocks_table.sql diff --git a/db/migrations/00019_create_btc_schema.sql b/db/migrations/00010_create_btc_schema.sql similarity index 100% rename from db/migrations/00019_create_btc_schema.sql rename to db/migrations/00010_create_btc_schema.sql diff --git a/db/migrations/00010_create_watched_logs_table.sql b/db/migrations/00010_create_watched_logs_table.sql deleted file mode 100644 index 4268a68a..00000000 --- a/db/migrations/00010_create_watched_logs_table.sql +++ /dev/null @@ -1,12 +0,0 @@ --- +goose Up --- SQL in this section is executed when the migration is applied. -CREATE TABLE public.watched_logs -( - id SERIAL PRIMARY KEY, - contract_address VARCHAR(42), - topic_zero VARCHAR(66) -); - --- +goose Down --- SQL in this section is executed when the migration is rolled back. 
-DROP TABLE public.watched_logs; diff --git a/db/migrations/00020_create_btc_header_cids_table.sql b/db/migrations/00011_create_btc_header_cids_table.sql similarity index 100% rename from db/migrations/00020_create_btc_header_cids_table.sql rename to db/migrations/00011_create_btc_header_cids_table.sql diff --git a/db/migrations/00021_create_btc_transaction_cids_table.sql b/db/migrations/00012_create_btc_transaction_cids_table.sql similarity index 100% rename from db/migrations/00021_create_btc_transaction_cids_table.sql rename to db/migrations/00012_create_btc_transaction_cids_table.sql diff --git a/db/migrations/00022_create_btc_tx_outputs_table.sql b/db/migrations/00013_create_btc_tx_outputs_table.sql similarity index 100% rename from db/migrations/00022_create_btc_tx_outputs_table.sql rename to db/migrations/00013_create_btc_tx_outputs_table.sql diff --git a/db/migrations/00023_create_btc_tx_inputs_table.sql b/db/migrations/00014_create_btc_tx_inputs_table.sql similarity index 100% rename from db/migrations/00023_create_btc_tx_inputs_table.sql rename to db/migrations/00014_create_btc_tx_inputs_table.sql diff --git a/db/migrations/00024_create_eth_queued_data_table.sql b/db/migrations/00015_create_eth_queued_data_table.sql similarity index 100% rename from db/migrations/00024_create_eth_queued_data_table.sql rename to db/migrations/00015_create_eth_queued_data_table.sql diff --git a/db/migrations/00025_create_btc_queued_data_table.sql b/db/migrations/00016_create_btc_queued_data_table.sql similarity index 100% rename from db/migrations/00025_create_btc_queued_data_table.sql rename to db/migrations/00016_create_btc_queued_data_table.sql diff --git a/db/migrations/00026_create_postgraphile_comments.sql b/db/migrations/00017_create_postgraphile_comments.sql similarity index 84% rename from db/migrations/00026_create_postgraphile_comments.sql rename to db/migrations/00017_create_postgraphile_comments.sql index 65711df0..6d7668ba 100644 --- 
a/db/migrations/00026_create_postgraphile_comments.sql +++ b/db/migrations/00017_create_postgraphile_comments.sql @@ -6,8 +6,6 @@ COMMENT ON TABLE btc.queue_data IS E'@name BtcQueueData'; COMMENT ON TABLE eth.transaction_cids IS E'@name EthTransactionCids'; COMMENT ON TABLE eth.header_cids IS E'@name EthHeaderCids'; COMMENT ON TABLE eth.queue_data IS E'@name EthQueueData'; -COMMENT ON TABLE public.headers IS E'@name EthHeaders'; -COMMENT ON COLUMN public.headers.node_id IS E'@name EthNodeID'; COMMENT ON COLUMN public.nodes.node_id IS E'@name ChainNodeID'; COMMENT ON COLUMN eth.header_cids.node_id IS E'@name EthNodeID'; COMMENT ON COLUMN btc.header_cids.node_id IS E'@name BtcNodeID'; \ No newline at end of file diff --git a/db/migrations/00027_update_state_cids.sql b/db/migrations/00018_update_state_cids.sql similarity index 100% rename from db/migrations/00027_update_state_cids.sql rename to db/migrations/00018_update_state_cids.sql diff --git a/db/migrations/00028_update_storage_cids.sql b/db/migrations/00019_update_storage_cids.sql similarity index 100% rename from db/migrations/00028_update_storage_cids.sql rename to db/migrations/00019_update_storage_cids.sql diff --git a/db/migrations/00029_update_header_cids.sql b/db/migrations/00020_update_header_cids.sql similarity index 100% rename from db/migrations/00029_update_header_cids.sql rename to db/migrations/00020_update_header_cids.sql diff --git a/db/migrations/00030_create_eth_state_accouts_table.sql b/db/migrations/00021_create_eth_state_accouts_table.sql similarity index 100% rename from db/migrations/00030_create_eth_state_accouts_table.sql rename to db/migrations/00021_create_eth_state_accouts_table.sql diff --git a/db/migrations/00031_rename_to_leaf_key.sql b/db/migrations/00022_rename_to_leaf_key.sql similarity index 100% rename from db/migrations/00031_rename_to_leaf_key.sql rename to db/migrations/00022_rename_to_leaf_key.sql diff --git a/db/migrations/00032_update_receipt_cids.sql 
b/db/migrations/00023_update_receipt_cids.sql similarity index 100% rename from db/migrations/00032_update_receipt_cids.sql rename to db/migrations/00023_update_receipt_cids.sql diff --git a/db/migrations/00033_add_times_validated.sql b/db/migrations/00024_add_times_validated.sql similarity index 100% rename from db/migrations/00033_add_times_validated.sql rename to db/migrations/00024_add_times_validated.sql diff --git a/db/schema.sql b/db/schema.sql index 36b5ffb1..a13e13a1 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -2,8 +2,8 @@ -- PostgreSQL database dump -- --- Dumped from database version 10.10 --- Dumped by pg_dump version 10.10 +-- Dumped from database version 12.1 +-- Dumped by pg_dump version 12.1 SET statement_timeout = 0; SET lock_timeout = 0; @@ -30,23 +30,9 @@ CREATE SCHEMA btc; CREATE SCHEMA eth; --- --- Name: plpgsql; Type: EXTENSION; Schema: -; Owner: - --- - -CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog; - - --- --- Name: EXTENSION plpgsql; Type: COMMENT; Schema: -; Owner: - --- - -COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language'; - - SET default_tablespace = ''; -SET default_with_oids = false; +SET default_table_access_method = heap; -- -- Name: header_cids; Type: TABLE; Schema: btc; Owner: - @@ -561,37 +547,6 @@ CREATE SEQUENCE eth.uncle_cids_id_seq ALTER SEQUENCE eth.uncle_cids_id_seq OWNED BY eth.uncle_cids.id; --- --- Name: addresses; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.addresses ( - id integer NOT NULL, - address character varying(42), - hashed_address character varying(66) -); - - --- --- Name: addresses_id_seq; Type: SEQUENCE; Schema: public; Owner: - --- - -CREATE SEQUENCE public.addresses_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: addresses_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - --- - -ALTER SEQUENCE public.addresses_id_seq OWNED BY public.addresses.id; - - -- -- Name: blocks; Type: TABLE; 
Schema: public; Owner: - -- @@ -602,36 +557,6 @@ CREATE TABLE public.blocks ( ); --- --- Name: checked_headers; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.checked_headers ( - id integer NOT NULL, - header_id integer NOT NULL -); - - --- --- Name: checked_headers_id_seq; Type: SEQUENCE; Schema: public; Owner: - --- - -CREATE SEQUENCE public.checked_headers_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: checked_headers_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - --- - -ALTER SEQUENCE public.checked_headers_id_seq OWNED BY public.checked_headers.id; - - -- -- Name: goose_db_version; Type: TABLE; Schema: public; Owner: - -- @@ -664,174 +589,6 @@ CREATE SEQUENCE public.goose_db_version_id_seq ALTER SEQUENCE public.goose_db_version_id_seq OWNED BY public.goose_db_version.id; --- --- Name: header_sync_logs; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.header_sync_logs ( - id integer NOT NULL, - header_id integer NOT NULL, - address integer NOT NULL, - topics bytea[], - data bytea, - block_number bigint, - block_hash character varying(66), - tx_hash character varying(66), - tx_index integer, - log_index integer, - raw jsonb, - transformed boolean DEFAULT false NOT NULL -); - - --- --- Name: header_sync_logs_id_seq; Type: SEQUENCE; Schema: public; Owner: - --- - -CREATE SEQUENCE public.header_sync_logs_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: header_sync_logs_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - --- - -ALTER SEQUENCE public.header_sync_logs_id_seq OWNED BY public.header_sync_logs.id; - - --- --- Name: header_sync_receipts; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.header_sync_receipts ( - id integer NOT NULL, - transaction_id integer NOT NULL, - header_id integer NOT NULL, - contract_address_id integer NOT NULL, - cumulative_gas_used numeric, - 
gas_used numeric, - state_root character varying(66), - status integer, - tx_hash character varying(66), - rlp bytea -); - - --- --- Name: header_sync_receipts_id_seq; Type: SEQUENCE; Schema: public; Owner: - --- - -CREATE SEQUENCE public.header_sync_receipts_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: header_sync_receipts_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - --- - -ALTER SEQUENCE public.header_sync_receipts_id_seq OWNED BY public.header_sync_receipts.id; - - --- --- Name: header_sync_transactions; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.header_sync_transactions ( - id integer NOT NULL, - header_id integer NOT NULL, - hash character varying(66), - gas_limit numeric, - gas_price numeric, - input_data bytea, - nonce numeric, - raw bytea, - tx_from character varying(44), - tx_index integer, - tx_to character varying(44), - value numeric -); - - --- --- Name: header_sync_transactions_id_seq; Type: SEQUENCE; Schema: public; Owner: - --- - -CREATE SEQUENCE public.header_sync_transactions_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: header_sync_transactions_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - --- - -ALTER SEQUENCE public.header_sync_transactions_id_seq OWNED BY public.header_sync_transactions.id; - - --- --- Name: headers; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.headers ( - id integer NOT NULL, - hash character varying(66), - block_number bigint, - raw jsonb, - block_timestamp numeric, - check_count integer DEFAULT 0 NOT NULL, - node_id integer NOT NULL, - eth_node_fingerprint character varying(128) -); - - --- --- Name: TABLE headers; Type: COMMENT; Schema: public; Owner: - --- - -COMMENT ON TABLE public.headers IS '@name EthHeaders'; - - --- --- Name: COLUMN headers.node_id; Type: COMMENT; Schema: public; Owner: - --- - -COMMENT ON COLUMN 
public.headers.node_id IS '@name EthNodeID'; - - --- --- Name: headers_id_seq; Type: SEQUENCE; Schema: public; Owner: - --- - -CREATE SEQUENCE public.headers_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: headers_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - --- - -ALTER SEQUENCE public.headers_id_seq OWNED BY public.headers.id; - - -- -- Name: nodes; Type: TABLE; Schema: public; Owner: - -- @@ -879,101 +636,6 @@ CREATE SEQUENCE public.nodes_id_seq ALTER SEQUENCE public.nodes_id_seq OWNED BY public.nodes.id; --- --- Name: queued_storage; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.queued_storage ( - id integer NOT NULL, - diff_id bigint NOT NULL -); - - --- --- Name: queued_storage_id_seq; Type: SEQUENCE; Schema: public; Owner: - --- - -CREATE SEQUENCE public.queued_storage_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: queued_storage_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - --- - -ALTER SEQUENCE public.queued_storage_id_seq OWNED BY public.queued_storage.id; - - --- --- Name: storage_diff; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.storage_diff ( - id integer NOT NULL, - block_height bigint, - block_hash bytea, - hashed_address bytea, - storage_key bytea, - storage_value bytea -); - - --- --- Name: storage_diff_id_seq; Type: SEQUENCE; Schema: public; Owner: - --- - -CREATE SEQUENCE public.storage_diff_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: storage_diff_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - --- - -ALTER SEQUENCE public.storage_diff_id_seq OWNED BY public.storage_diff.id; - - --- --- Name: watched_logs; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.watched_logs ( - id integer NOT NULL, - contract_address character varying(42), - topic_zero character varying(66) -); - 
- --- --- Name: watched_logs_id_seq; Type: SEQUENCE; Schema: public; Owner: - --- - -CREATE SEQUENCE public.watched_logs_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: watched_logs_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - --- - -ALTER SEQUENCE public.watched_logs_id_seq OWNED BY public.watched_logs.id; - - -- -- Name: header_cids id; Type: DEFAULT; Schema: btc; Owner: - -- @@ -1065,20 +727,6 @@ ALTER TABLE ONLY eth.transaction_cids ALTER COLUMN id SET DEFAULT nextval('eth.t ALTER TABLE ONLY eth.uncle_cids ALTER COLUMN id SET DEFAULT nextval('eth.uncle_cids_id_seq'::regclass); --- --- Name: addresses id; Type: DEFAULT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.addresses ALTER COLUMN id SET DEFAULT nextval('public.addresses_id_seq'::regclass); - - --- --- Name: checked_headers id; Type: DEFAULT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.checked_headers ALTER COLUMN id SET DEFAULT nextval('public.checked_headers_id_seq'::regclass); - - -- -- Name: goose_db_version id; Type: DEFAULT; Schema: public; Owner: - -- @@ -1086,34 +734,6 @@ ALTER TABLE ONLY public.checked_headers ALTER COLUMN id SET DEFAULT nextval('pub ALTER TABLE ONLY public.goose_db_version ALTER COLUMN id SET DEFAULT nextval('public.goose_db_version_id_seq'::regclass); --- --- Name: header_sync_logs id; Type: DEFAULT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.header_sync_logs ALTER COLUMN id SET DEFAULT nextval('public.header_sync_logs_id_seq'::regclass); - - --- --- Name: header_sync_receipts id; Type: DEFAULT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.header_sync_receipts ALTER COLUMN id SET DEFAULT nextval('public.header_sync_receipts_id_seq'::regclass); - - --- --- Name: header_sync_transactions id; Type: DEFAULT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.header_sync_transactions ALTER COLUMN id SET DEFAULT 
nextval('public.header_sync_transactions_id_seq'::regclass); - - --- --- Name: headers id; Type: DEFAULT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.headers ALTER COLUMN id SET DEFAULT nextval('public.headers_id_seq'::regclass); - - -- -- Name: nodes id; Type: DEFAULT; Schema: public; Owner: - -- @@ -1121,27 +741,6 @@ ALTER TABLE ONLY public.headers ALTER COLUMN id SET DEFAULT nextval('public.head ALTER TABLE ONLY public.nodes ALTER COLUMN id SET DEFAULT nextval('public.nodes_id_seq'::regclass); --- --- Name: queued_storage id; Type: DEFAULT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.queued_storage ALTER COLUMN id SET DEFAULT nextval('public.queued_storage_id_seq'::regclass); - - --- --- Name: storage_diff id; Type: DEFAULT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.storage_diff ALTER COLUMN id SET DEFAULT nextval('public.storage_diff_id_seq'::regclass); - - --- --- Name: watched_logs id; Type: DEFAULT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.watched_logs ALTER COLUMN id SET DEFAULT nextval('public.watched_logs_id_seq'::regclass); - - -- -- Name: header_cids header_cids_block_number_block_hash_key; Type: CONSTRAINT; Schema: btc; Owner: - -- @@ -1350,22 +949,6 @@ ALTER TABLE ONLY eth.uncle_cids ADD CONSTRAINT uncle_cids_pkey PRIMARY KEY (id); --- --- Name: addresses addresses_address_key; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.addresses - ADD CONSTRAINT addresses_address_key UNIQUE (address); - - --- --- Name: addresses addresses_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.addresses - ADD CONSTRAINT addresses_pkey PRIMARY KEY (id); - - -- -- Name: blocks blocks_key_key; Type: CONSTRAINT; Schema: public; Owner: - -- @@ -1374,22 +957,6 @@ ALTER TABLE ONLY public.blocks ADD CONSTRAINT blocks_key_key UNIQUE (key); --- --- Name: checked_headers checked_headers_header_id_key; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY 
public.checked_headers - ADD CONSTRAINT checked_headers_header_id_key UNIQUE (header_id); - - --- --- Name: checked_headers checked_headers_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.checked_headers - ADD CONSTRAINT checked_headers_pkey PRIMARY KEY (id); - - -- -- Name: goose_db_version goose_db_version_pkey; Type: CONSTRAINT; Schema: public; Owner: - -- @@ -1398,70 +965,6 @@ ALTER TABLE ONLY public.goose_db_version ADD CONSTRAINT goose_db_version_pkey PRIMARY KEY (id); --- --- Name: header_sync_logs header_sync_logs_header_id_tx_index_log_index_key; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.header_sync_logs - ADD CONSTRAINT header_sync_logs_header_id_tx_index_log_index_key UNIQUE (header_id, tx_index, log_index); - - --- --- Name: header_sync_logs header_sync_logs_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.header_sync_logs - ADD CONSTRAINT header_sync_logs_pkey PRIMARY KEY (id); - - --- --- Name: header_sync_receipts header_sync_receipts_header_id_transaction_id_key; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.header_sync_receipts - ADD CONSTRAINT header_sync_receipts_header_id_transaction_id_key UNIQUE (header_id, transaction_id); - - --- --- Name: header_sync_receipts header_sync_receipts_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.header_sync_receipts - ADD CONSTRAINT header_sync_receipts_pkey PRIMARY KEY (id); - - --- --- Name: header_sync_transactions header_sync_transactions_header_id_hash_key; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.header_sync_transactions - ADD CONSTRAINT header_sync_transactions_header_id_hash_key UNIQUE (header_id, hash); - - --- --- Name: header_sync_transactions header_sync_transactions_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.header_sync_transactions - ADD CONSTRAINT 
header_sync_transactions_pkey PRIMARY KEY (id); - - --- --- Name: headers headers_block_number_hash_eth_node_fingerprint_key; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.headers - ADD CONSTRAINT headers_block_number_hash_eth_node_fingerprint_key UNIQUE (block_number, hash, eth_node_fingerprint); - - --- --- Name: headers headers_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.headers - ADD CONSTRAINT headers_pkey PRIMARY KEY (id); - - -- -- Name: nodes node_uc; Type: CONSTRAINT; Schema: public; Owner: - -- @@ -1478,88 +981,6 @@ ALTER TABLE ONLY public.nodes ADD CONSTRAINT nodes_pkey PRIMARY KEY (id); --- --- Name: queued_storage queued_storage_diff_id_key; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.queued_storage - ADD CONSTRAINT queued_storage_diff_id_key UNIQUE (diff_id); - - --- --- Name: queued_storage queued_storage_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.queued_storage - ADD CONSTRAINT queued_storage_pkey PRIMARY KEY (id); - - --- --- Name: storage_diff storage_diff_block_height_block_hash_hashed_address_storage_key; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.storage_diff - ADD CONSTRAINT storage_diff_block_height_block_hash_hashed_address_storage_key UNIQUE (block_height, block_hash, hashed_address, storage_key, storage_value); - - --- --- Name: storage_diff storage_diff_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.storage_diff - ADD CONSTRAINT storage_diff_pkey PRIMARY KEY (id); - - --- --- Name: watched_logs watched_logs_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.watched_logs - ADD CONSTRAINT watched_logs_pkey PRIMARY KEY (id); - - --- --- Name: header_sync_receipts_header; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX header_sync_receipts_header ON public.header_sync_receipts USING btree (header_id); - - --- --- 
Name: header_sync_receipts_transaction; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX header_sync_receipts_transaction ON public.header_sync_receipts USING btree (transaction_id); - - --- --- Name: header_sync_transactions_header; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX header_sync_transactions_header ON public.header_sync_transactions USING btree (header_id); - - --- --- Name: header_sync_transactions_tx_index; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX header_sync_transactions_tx_index ON public.header_sync_transactions USING btree (tx_index); - - --- --- Name: headers_block_number; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX headers_block_number ON public.headers USING btree (block_number); - - --- --- Name: headers_block_timestamp; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX headers_block_timestamp ON public.headers USING btree (block_timestamp); - - -- -- Name: header_cids header_cids_node_id_fkey; Type: FK CONSTRAINT; Schema: btc; Owner: - -- @@ -1648,78 +1069,6 @@ ALTER TABLE ONLY eth.uncle_cids ADD CONSTRAINT uncle_cids_header_id_fkey FOREIGN KEY (header_id) REFERENCES eth.header_cids(id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED; --- --- Name: checked_headers checked_headers_header_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.checked_headers - ADD CONSTRAINT checked_headers_header_id_fkey FOREIGN KEY (header_id) REFERENCES public.headers(id) ON DELETE CASCADE; - - --- --- Name: header_sync_logs header_sync_logs_address_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.header_sync_logs - ADD CONSTRAINT header_sync_logs_address_fkey FOREIGN KEY (address) REFERENCES public.addresses(id) ON DELETE CASCADE; - - --- --- Name: header_sync_logs header_sync_logs_header_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.header_sync_logs - ADD CONSTRAINT 
header_sync_logs_header_id_fkey FOREIGN KEY (header_id) REFERENCES public.headers(id) ON DELETE CASCADE; - - --- --- Name: header_sync_receipts header_sync_receipts_contract_address_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.header_sync_receipts - ADD CONSTRAINT header_sync_receipts_contract_address_id_fkey FOREIGN KEY (contract_address_id) REFERENCES public.addresses(id) ON DELETE CASCADE; - - --- --- Name: header_sync_receipts header_sync_receipts_header_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.header_sync_receipts - ADD CONSTRAINT header_sync_receipts_header_id_fkey FOREIGN KEY (header_id) REFERENCES public.headers(id) ON DELETE CASCADE; - - --- --- Name: header_sync_receipts header_sync_receipts_transaction_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.header_sync_receipts - ADD CONSTRAINT header_sync_receipts_transaction_id_fkey FOREIGN KEY (transaction_id) REFERENCES public.header_sync_transactions(id) ON DELETE CASCADE; - - --- --- Name: header_sync_transactions header_sync_transactions_header_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.header_sync_transactions - ADD CONSTRAINT header_sync_transactions_header_id_fkey FOREIGN KEY (header_id) REFERENCES public.headers(id) ON DELETE CASCADE; - - --- --- Name: headers headers_node_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.headers - ADD CONSTRAINT headers_node_id_fkey FOREIGN KEY (node_id) REFERENCES public.nodes(id) ON DELETE CASCADE; - - --- --- Name: queued_storage queued_storage_diff_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.queued_storage - ADD CONSTRAINT queued_storage_diff_id_fkey FOREIGN KEY (diff_id) REFERENCES public.storage_diff(id); - - -- -- PostgreSQL database dump complete -- diff --git a/dockerfiles/migrations/Dockerfile 
b/dockerfiles/migrations/Dockerfile index fd4c9139..bb143005 100644 --- a/dockerfiles/migrations/Dockerfile +++ b/dockerfiles/migrations/Dockerfile @@ -8,14 +8,14 @@ RUN apk add busybox-extras FROM golang:1.12.4 as builder # Get and build vulcanizedb -ADD . /go/src/github.com/vulcanize/vulcanizedb +ADD . /go/src/github.com/vulcanize/ipfs-chain-watcher # Build migration tool RUN go get -u -d github.com/pressly/goose/cmd/goose WORKDIR /go/src/github.com/pressly/goose/cmd/goose RUN GCO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -ldflags '-extldflags "-static"' -tags='no_mysql no_sqlite' -o goose . -WORKDIR /go/src/github.com/vulcanize/vulcanizedb +WORKDIR /go/src/github.com/vulcanize/ipfs-chain-watcher # app container FROM alpine @@ -29,12 +29,12 @@ USER $USER # chown first so dir is writable # note: using $USER is merged, but not in the stable release yet -COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/vulcanizedb/dockerfiles/migrations/startup_script.sh . +COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/ipfs-chain-watcher/dockerfiles/migrations/startup_script.sh . # keep binaries immutable COPY --from=builder /go/src/github.com/pressly/goose/cmd/goose/goose goose -COPY --from=builder /go/src/github.com/vulcanize/vulcanizedb/db/migrations migrations/vulcanizedb +COPY --from=builder /go/src/github.com/vulcanize/ipfs-chain-watcher/db/migrations migrations/vulcanizedb # XXX dir is already writeable RUN touch vulcanizedb.log CMD ["./startup_script.sh"] \ No newline at end of file diff --git a/dockerfiles/super_node/Dockerfile b/dockerfiles/super_node/Dockerfile index 2c29c455..4f209d79 100644 --- a/dockerfiles/super_node/Dockerfile +++ b/dockerfiles/super_node/Dockerfile @@ -5,8 +5,8 @@ RUN apk --update --no-cache add make git g++ linux-headers RUN apk add busybox-extras # Get and build vulcanizedb -ADD . /go/src/github.com/vulcanize/vulcanizedb -WORKDIR /go/src/github.com/vulcanize/vulcanizedb +ADD . 
/go/src/github.com/vulcanize/ipfs-chain-watcher +WORKDIR /go/src/github.com/vulcanize/ipfs-chain-watcher RUN GO111MODULE=on GCO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -ldflags '-extldflags "-static"' -o vulcanizedb . # Build migration tool @@ -14,7 +14,7 @@ RUN go get -u -d github.com/pressly/goose/cmd/goose WORKDIR /go/src/github.com/pressly/goose/cmd/goose RUN GCO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -ldflags '-extldflags "-static"' -tags='no_mysql no_sqlite' -o goose . -WORKDIR /go/src/github.com/vulcanize/vulcanizedb +WORKDIR /go/src/github.com/vulcanize/ipfs-chain-watcher # app container FROM alpine @@ -31,16 +31,16 @@ USER $USER # chown first so dir is writable # note: using $USER is merged, but not in the stable release yet -COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/vulcanizedb/$CONFIG_FILE config.toml -COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/vulcanizedb/dockerfiles/super_node/startup_script.sh . -COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/vulcanizedb/dockerfiles/super_node/entrypoint.sh . +COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/ipfs-chain-watcher/$CONFIG_FILE config.toml +COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/ipfs-chain-watcher/dockerfiles/super_node/startup_script.sh . +COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/ipfs-chain-watcher/dockerfiles/super_node/entrypoint.sh . 
# keep binaries immutable -COPY --from=builder /go/src/github.com/vulcanize/vulcanizedb/vulcanizedb vulcanizedb +COPY --from=builder /go/src/github.com/vulcanize/ipfs-chain-watcher/vulcanizedb vulcanizedb COPY --from=builder /go/src/github.com/pressly/goose/cmd/goose/goose goose -COPY --from=builder /go/src/github.com/vulcanize/vulcanizedb/db/migrations migrations/vulcanizedb -COPY --from=builder /go/src/github.com/vulcanize/vulcanizedb/environments environments +COPY --from=builder /go/src/github.com/vulcanize/ipfs-chain-watcher/db/migrations migrations/vulcanizedb +COPY --from=builder /go/src/github.com/vulcanize/ipfs-chain-watcher/environments environments EXPOSE $EXPOSE_PORT_1 EXPOSE $EXPOSE_PORT_2 diff --git a/documentation/custom-transformers.md b/documentation/custom-transformers.md index d45afcdf..edde070e 100644 --- a/documentation/custom-transformers.md +++ b/documentation/custom-transformers.md @@ -63,7 +63,7 @@ these are run independently, instead of using `composeAndExecute`, a couple of t * The `execute` command does not require the plugin transformer dependencies be located in their `$GOPATH` directories, instead it expects a .so file (of the name specified in the config file) to be in -`$GOPATH/src/github.com/vulcanize/vulcanizedb/plugins/` and, as noted above, also expects the plugin db migrations to +`$GOPATH/src/github.com/vulcanize/ipfs-chain-watcher/plugins/` and, as noted above, also expects the plugin db migrations to have already been ran against the database. 
* Usage: @@ -103,7 +103,7 @@ The config provides information for composing a set of transformers from externa wsPath = "ws://127.0.0.1:8546" [exporter] - home = "github.com/vulcanize/vulcanizedb" + home = "github.com/vulcanize/ipfs-chain-watcher" name = "exampleTransformerExporter" save = false transformerNames = [ @@ -137,7 +137,7 @@ The config provides information for composing a set of transformers from externa migrations = "to/db/migrations" rank = "1" ``` -- `home` is the name of the package you are building the plugin for, in most cases this is github.com/vulcanize/vulcanizedb +- `home` is the name of the package you are building the plugin for, in most cases this is github.com/vulcanize/ipfs-chain-watcher - `name` is the name used for the plugin files (.so and .go) - `save` indicates whether or not the user wants to save the .go file instead of removing it after .so compilation. Sometimes useful for debugging/trouble-shooting purposes. - `transformerNames` is the list of the names of the transformers we are composing together, so we know how to access their submaps in the exporter map @@ -174,7 +174,7 @@ The general structure of a plugin .go file, and what we would see built with the package main import ( - interface1 "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" + interface1 "github.com/vulcanize/ipfs-chain-watcher/libraries/shared/transformer" transformer1 "github.com/account/repo/path/to/transformer1" transformer2 "github.com/account/repo/path/to/transformer2" transformer3 "github.com/account/repo/path/to/transformer3" diff --git a/documentation/super_node/apis.md b/documentation/super_node/apis.md index c6a00701..3a0952c3 100644 --- a/documentation/super_node/apis.md +++ b/documentation/super_node/apis.md @@ -40,10 +40,10 @@ An example of how to subscribe to a real-time Ethereum data feed from the super "github.com/ethereum/go-ethereum/rpc" "github.com/spf13/viper" - "github.com/vulcanize/vulcanizedb/libraries/shared/streamer" - 
"github.com/vulcanize/vulcanizedb/pkg/eth/client" - "github.com/vulcanize/vulcanizedb/pkg/super_node" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" + "github.com/vulcanize/ipfs-chain-watcher/libraries/shared/streamer" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth/client" + "github.com/vulcanize/ipfs-chain-watcher/pkg/super_node" + "github.com/vulcanize/ipfs-chain-watcher/pkg/super_node/eth" ) config, _ := eth.NewEthSubscriptionConfig() @@ -162,10 +162,10 @@ An example of how to subscribe to a real-time Bitcoin data feed from the super n "github.com/ethereum/go-ethereum/rpc" "github.com/spf13/viper" - "github.com/vulcanize/vulcanizedb/libraries/shared/streamer" - "github.com/vulcanize/vulcanizedb/pkg/eth/client" - "github.com/vulcanize/vulcanizedb/pkg/super_node" - "github.com/vulcanize/vulcanizedb/pkg/super_node/btc" + "github.com/vulcanize/ipfs-chain-watcher/libraries/shared/streamer" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth/client" + "github.com/vulcanize/ipfs-chain-watcher/pkg/super_node" + "github.com/vulcanize/ipfs-chain-watcher/pkg/super_node/btc" ) config, _ := btc.NewBtcSubscriptionConfig() diff --git a/documentation/super_node/setup.md b/documentation/super_node/setup.md index b03eefaa..fa95a6ef 100644 --- a/documentation/super_node/setup.md +++ b/documentation/super_node/setup.md @@ -138,9 +138,9 @@ Finally, we can begin the vulcanizeDB process itself. 
Start by downloading vulcanizedb and moving into the repo: -`go get github.com/vulcanize/vulcanizedb` +`go get github.com/vulcanize/ipfs-chain-watcher` -`cd $GOPATH/src/github.com/vulcanize/vulcanizedb` +`cd $GOPATH/src/github.com/vulcanize/ipfs-chain-watcher` Run the db migrations against the Postgres database we created for vulcanizeDB: diff --git a/environments/composeAndExecuteAccountTransformer.toml b/environments/composeAndExecuteAccountTransformer.toml deleted file mode 100644 index 97a2b21a..00000000 --- a/environments/composeAndExecuteAccountTransformer.toml +++ /dev/null @@ -1,38 +0,0 @@ -[database] - name = "vulcanize_public" - hostname = "localhost" - port = 5432 - -[client] - ipcPath = "" - -[exporter] - home = "github.com/vulcanize/vulcanizedb" - name = "accountTransformerExporter" - save = false - transformerNames = [ - "account" - ] - [exporter.account] - path = "transformers/account/light/initializer" - type = "eth_contract" - repository = "github.com/vulcanize/account_transformers" - migrations = "db/migrations" - rank = "0" - -[token] - addresses = [ - "0x58b6A8A3302369DAEc383334672404Ee733aB239", - "0x862Da0A691bb0b74038377295f8fF523D0493eB4", - ] - [token.equivalents] - 0x0000000000085d4780B73119b644AE5ecd22b376 = [ - "0x8dd5fbCe2F6a956C3022bA3663759011Dd51e73E" - ] - 0x58b6A8A3302369DAEc383334672404Ee733aB239 = [ - "0x8e306b005773bee6bA6A6e8972Bc79D766cC15c8" - ] - -[account] - start = 0 - addresses = [] diff --git a/environments/example.toml b/environments/example.toml deleted file mode 100644 index e025baaa..00000000 --- a/environments/example.toml +++ /dev/null @@ -1,26 +0,0 @@ -[database] - name = "vulcanize_public" - hostname = "localhost" - port = 5432 - -[client] - ipcPath = "" - -[contract] - network = "" - addresses = [ - "0x314159265dD8dbb310642f98f50C066173C1259b", - "0x8dd5fbCe2F6a956C3022bA3663759011Dd51e73E" - ] - [contract.0x314159265dD8dbb310642f98f50C066173C1259b] - abi = 
'[{"constant":true,"inputs":[{"name":"node","type":"bytes32"}],"name":"resolver","outputs":[{"name":"","type":"address"}],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"node","type":"bytes32"}],"name":"owner","outputs":[{"name":"","type":"address"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"node","type":"bytes32"},{"name":"label","type":"bytes32"},{"name":"owner","type":"address"}],"name":"setSubnodeOwner","outputs":[],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"node","type":"bytes32"},{"name":"ttl","type":"uint64"}],"name":"setTTL","outputs":[],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"node","type":"bytes32"}],"name":"ttl","outputs":[{"name":"","type":"uint64"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"node","type":"bytes32"},{"name":"resolver","type":"address"}],"name":"setResolver","outputs":[],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"node","type":"bytes32"},{"name":"owner","type":"address"}],"name":"setOwner","outputs":[],"payable":false,"type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":false,"name":"owner","type":"address"}],"name":"Transfer","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":true,"name":"label","type":"bytes32"},{"indexed":false,"name":"owner","type":"address"}],"name":"NewOwner","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":false,"name":"resolver","type":"address"}],"name":"NewResolver","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":false,"name":"ttl","type":"uint64"}],"name":"NewTTL","type":"event"}]' - startingBlock = 3327417 - [contract.0x8dd5fbCe2F6a956C3022bA3663759011Dd51e73E] - events = [ - "Transfer", - "Issue" - ] - methods = [ - "balanceOf" 
- ] - startingBlock = 5197514 \ No newline at end of file diff --git a/environments/public.toml.example b/environments/public.toml.example deleted file mode 100644 index 52f8a2d1..00000000 --- a/environments/public.toml.example +++ /dev/null @@ -1,8 +0,0 @@ -[database] - name = "vulcanize_public" - hostname = "localhost" - port = 5432 - -[client] - ipcPath = - levelDbPath = diff --git a/go.mod b/go.mod index 6ba1ac6b..e9d80bc4 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module github.com/vulcanize/vulcanizedb +module github.com/vulcanize/ipfs-chain-watcher go 1.12 diff --git a/go.sum b/go.sum index 7000ae8f..c174b7da 100644 --- a/go.sum +++ b/go.sum @@ -83,7 +83,6 @@ github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwc github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cskr/pubsub v1.0.2 h1:vlOzMhl6PFn60gRlTQQsIfVwaPB/B/8MziK8FhEPt/0= github.com/cskr/pubsub v1.0.2/go.mod h1:/8MzYXk/NJAz782G8RPkFzXTZVu63VotefPnR9TIRis= -github.com/dave/jennifer v1.3.0 h1:p3tl41zjjCZTNBytMwrUuiAnherNUZktlhPTKoF/sEk= github.com/dave/jennifer v1.3.0/go.mod h1:fIb+770HOpJ2fmN9EPPKOqm1vMGhB+TwXKMZhrIygKg= github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -611,7 +610,6 @@ github.com/polydawn/refmt v0.0.0-20190221155625-df39d6c2d992/go.mod h1:uIp+gprXx github.com/polydawn/refmt v0.0.0-20190408063855-01bf1e26dd14/go.mod h1:uIp+gprXxxrWSjjklXD+mN4wed/tMfjMMmN/9+JsA9o= github.com/polydawn/refmt v0.0.0-20190731040541-eff0b363297a h1:TdavzKWkPcC2G+6rKJclm/JfrWC6WZFfLUR7EJJX8MA= github.com/polydawn/refmt v0.0.0-20190731040541-eff0b363297a/go.mod h1:uIp+gprXxxrWSjjklXD+mN4wed/tMfjMMmN/9+JsA9o= -github.com/pressly/goose v2.6.0+incompatible h1:3f8zIQ8rfgP9tyI0Hmcs2YNAqUCL1c+diLe3iU8Qd/k= github.com/pressly/goose 
v2.6.0+incompatible/go.mod h1:m+QHWCqxR3k8D9l7qfzuC/djtlfzxr34mozWDYEu1z8= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= @@ -690,8 +688,6 @@ github.com/tyler-smith/go-bip39 v1.0.1-0.20181017060643-dbb3b84ba2ef/go.mod h1:s github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= -github.com/vulcanize/go-ethereum v1.5.10-0.20200311182536-d07dc803d290 h1:uMWt+x6JhVT7GyL983weZSxv1zDBxvGlI9HNkcTnUeg= -github.com/vulcanize/go-ethereum v1.5.10-0.20200311182536-d07dc803d290/go.mod h1:7oC0Ni6dosMv5pxMigm6s0hN8g4haJMBnqmmo0D9YfQ= github.com/vulcanize/go-ethereum v1.9.11-statediff-0.0.2 h1:ebv2bWocCmNKGnpHtRjSWoTpkgyEbRBb028PanH43H8= github.com/vulcanize/go-ethereum v1.9.11-statediff-0.0.2/go.mod h1:7oC0Ni6dosMv5pxMigm6s0hN8g4haJMBnqmmo0D9YfQ= github.com/vulcanize/go-ipfs v0.4.22-alpha h1:W+6njT14KWllMhABRFtPndqHw8SHCt5SqD4YX528kxM= @@ -788,7 +784,6 @@ golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys 
v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= diff --git a/integration_test/block_rewards_test.go b/integration_test/block_rewards_test.go deleted file mode 100644 index 10ccdea8..00000000 --- a/integration_test/block_rewards_test.go +++ /dev/null @@ -1,62 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package integration - -import ( - "github.com/ethereum/go-ethereum/ethclient" - "github.com/ethereum/go-ethereum/rpc" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth" - "github.com/vulcanize/vulcanizedb/pkg/eth/client" - vRpc "github.com/vulcanize/vulcanizedb/pkg/eth/converters/rpc" - "github.com/vulcanize/vulcanizedb/pkg/eth/node" - "github.com/vulcanize/vulcanizedb/test_config" -) - -var _ = Describe("Rewards calculations", func() { - - It("calculates a block reward for a real block", func() { - rawRPCClient, err := rpc.Dial(test_config.TestClient.IPCPath) - Expect(err).NotTo(HaveOccurred()) - rpcClient := client.NewRPCClient(rawRPCClient, test_config.TestClient.IPCPath) - ethClient := ethclient.NewClient(rawRPCClient) - blockChainClient := client.NewEthClient(ethClient) - node := node.MakeNode(rpcClient) - transactionConverter := vRpc.NewRPCTransactionConverter(ethClient) - blockChain := eth.NewBlockChain(blockChainClient, rpcClient, node, transactionConverter) - block, err := blockChain.GetBlockByNumber(1071819) - Expect(err).ToNot(HaveOccurred()) - Expect(block.Reward).To(Equal("5313550000000000000")) - }) - - It("calculates an uncle reward for a real block", func() { - rawRPCClient, err := rpc.Dial(test_config.TestClient.IPCPath) - Expect(err).NotTo(HaveOccurred()) - rpcClient := client.NewRPCClient(rawRPCClient, test_config.TestClient.IPCPath) - ethClient := ethclient.NewClient(rawRPCClient) - blockChainClient := client.NewEthClient(ethClient) - node := node.MakeNode(rpcClient) - transactionConverter := vRpc.NewRPCTransactionConverter(ethClient) - blockChain := eth.NewBlockChain(blockChainClient, rpcClient, node, transactionConverter) - block, err := blockChain.GetBlockByNumber(1071819) - Expect(err).ToNot(HaveOccurred()) - Expect(block.UnclesReward).To(Equal("6875000000000000000")) - }) - -}) diff --git a/integration_test/contract_test.go b/integration_test/contract_test.go deleted file mode 100644 index d04c4662..00000000 --- a/integration_test/contract_test.go +++ /dev/null @@ -1,110 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This 
program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package integration - -import ( - "math/big" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/ethclient" - "github.com/ethereum/go-ethereum/rpc" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth" - "github.com/vulcanize/vulcanizedb/pkg/eth/client" - rpc2 "github.com/vulcanize/vulcanizedb/pkg/eth/converters/rpc" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/node" - "github.com/vulcanize/vulcanizedb/pkg/eth/testing" - "github.com/vulcanize/vulcanizedb/test_config" -) - -var _ = Describe("Reading contracts", func() { - - Describe("Getting a contract attribute", func() { - It("retrieves the event log for a specific block and contract", func() { - expectedLogZero := core.FullSyncLog{ - BlockNumber: 4703824, - TxHash: "0xf896bfd1eb539d881a1a31102b78de9f25cd591bf1fe1924b86148c0b205fd5d", - Address: "0xd26114cd6ee289accf82350c8d8487fedb8a0c07", - Topics: core.Topics{ - 0: "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef", - 1: "0x000000000000000000000000fbb1b73c4f0bda4f67dca266ce6ef42f520fbb98", - 2: "0x000000000000000000000000d26114cd6ee289accf82350c8d8487fedb8a0c07", - }, - Index: 19, - Data: "0x0000000000000000000000000000000000000000000000000c7d713b49da0000"} - rawRPCClient, err := 
rpc.Dial(test_config.TestClient.IPCPath) - Expect(err).NotTo(HaveOccurred()) - rpcClient := client.NewRPCClient(rawRPCClient, test_config.TestClient.IPCPath) - ethClient := ethclient.NewClient(rawRPCClient) - blockChainClient := client.NewEthClient(ethClient) - node := node.MakeNode(rpcClient) - transactionConverter := rpc2.NewRPCTransactionConverter(ethClient) - blockChain := eth.NewBlockChain(blockChainClient, rpcClient, node, transactionConverter) - contract := testing.SampleContract() - - logs, err := blockChain.GetFullSyncLogs(contract, big.NewInt(4703824), nil) - - Expect(err).To(BeNil()) - Expect(len(logs)).To(Equal(3)) - Expect(logs[0]).To(Equal(expectedLogZero)) - }) - - It("returns and empty log array when no events for a given block / contract combo", func() { - rawRPCClient, err := rpc.Dial(test_config.TestClient.IPCPath) - Expect(err).NotTo(HaveOccurred()) - rpcClient := client.NewRPCClient(rawRPCClient, test_config.TestClient.IPCPath) - ethClient := ethclient.NewClient(rawRPCClient) - blockChainClient := client.NewEthClient(ethClient) - node := node.MakeNode(rpcClient) - transactionConverter := rpc2.NewRPCTransactionConverter(ethClient) - blockChain := eth.NewBlockChain(blockChainClient, rpcClient, node, transactionConverter) - - logs, err := blockChain.GetFullSyncLogs(core.Contract{Hash: "0x123"}, big.NewInt(4703824), nil) - - Expect(err).To(BeNil()) - Expect(len(logs)).To(Equal(0)) - }) - }) - - Describe("Fetching Contract data", func() { - It("returns the correct attribute for a real contract", func() { - rawRPCClient, err := rpc.Dial(test_config.TestClient.IPCPath) - Expect(err).NotTo(HaveOccurred()) - rpcClient := client.NewRPCClient(rawRPCClient, test_config.TestClient.IPCPath) - ethClient := ethclient.NewClient(rawRPCClient) - blockChainClient := client.NewEthClient(ethClient) - node := node.MakeNode(rpcClient) - transactionConverter := rpc2.NewRPCTransactionConverter(ethClient) - blockChain := eth.NewBlockChain(blockChainClient, rpcClient, 
node, transactionConverter) - - contract := testing.SampleContract() - var balance = new(big.Int) - - args := make([]interface{}, 1) - args[0] = common.HexToHash("0xd26114cd6ee289accf82350c8d8487fedb8a0c07") - - err = blockChain.FetchContractData(contract.Abi, "0xd26114cd6ee289accf82350c8d8487fedb8a0c07", "balanceOf", args, &balance, 5167471) - Expect(err).NotTo(HaveOccurred()) - expected := new(big.Int) - expected.SetString("10897295492887612977137", 10) - Expect(balance).To(Equal(expected)) - }) - }) -}) diff --git a/integration_test/contract_watcher_header_sync_transformer_test.go b/integration_test/contract_watcher_header_sync_transformer_test.go deleted file mode 100644 index 0e0d3c9d..00000000 --- a/integration_test/contract_watcher_header_sync_transformer_test.go +++ /dev/null @@ -1,496 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package integration - -import ( - "fmt" - "strings" - - "github.com/ethereum/go-ethereum/common" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/header/transformer" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/constants" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -var _ = Describe("contractWatcher headerSync transformer", func() { - var db *postgres.DB - var err error - var blockChain core.BlockChain - var headerRepository repositories.HeaderRepository - var headerID int64 - var ensAddr = strings.ToLower(constants.EnsContractAddress) // 0x314159265dd8dbb310642f98f50c066173c1259b - var tusdAddr = strings.ToLower(constants.TusdContractAddress) // 0x8dd5fbce2f6a956c3022ba3663759011dd51e73e - - BeforeEach(func() { - db, blockChain = test_helpers.SetupDBandBC() - headerRepository = repositories.NewHeaderRepository(db) - }) - - AfterEach(func() { - test_helpers.TearDown(db) - }) - - Describe("Init", func() { - It("Initializes transformer's contract objects", func() { - _, insertErr := headerRepository.CreateOrUpdateHeader(mocks.MockHeader1) - Expect(insertErr).NotTo(HaveOccurred()) - _, insertErrTwo := headerRepository.CreateOrUpdateHeader(mocks.MockHeader3) - Expect(insertErrTwo).NotTo(HaveOccurred()) - t := transformer.NewTransformer(test_helpers.TusdConfig, blockChain, db) - err = t.Init() - Expect(err).ToNot(HaveOccurred()) - - c, ok := t.Contracts[tusdAddr] - Expect(ok).To(Equal(true)) - - // TODO: Fix this - // This test sometimes randomly fails because - // for some reason the starting block number is not updated from - // its original value (5197514) to the block number (6194632) - // of the earliest header (mocks.MockHeader1) in 
the repository - // It is not clear how this happens without one of the above insertErrs - // having been thrown and without any errors thrown during the Init() call - Expect(c.StartingBlock).To(Equal(int64(6194632))) - Expect(c.Abi).To(Equal(constants.TusdAbiString)) - Expect(c.Name).To(Equal("TrueUSD")) - Expect(c.Address).To(Equal(tusdAddr)) - }) - - It("initializes when no headers available in db", func() { - t := transformer.NewTransformer(test_helpers.TusdConfig, blockChain, db) - err = t.Init() - Expect(err).ToNot(HaveOccurred()) - }) - - It("Does nothing if nothing if no addresses are configured", func() { - _, insertErr := headerRepository.CreateOrUpdateHeader(mocks.MockHeader1) - Expect(insertErr).NotTo(HaveOccurred()) - _, insertErrTwo := headerRepository.CreateOrUpdateHeader(mocks.MockHeader3) - Expect(insertErrTwo).NotTo(HaveOccurred()) - var testConf config.ContractConfig - testConf = test_helpers.TusdConfig - testConf.Addresses = nil - t := transformer.NewTransformer(testConf, blockChain, db) - err = t.Init() - Expect(err).ToNot(HaveOccurred()) - - _, ok := t.Contracts[tusdAddr] - Expect(ok).To(Equal(false)) - }) - }) - - Describe("Execute- against TrueUSD contract", func() { - BeforeEach(func() { - header1, err := blockChain.GetHeaderByNumber(6791668) - Expect(err).ToNot(HaveOccurred()) - header2, err := blockChain.GetHeaderByNumber(6791669) - Expect(err).ToNot(HaveOccurred()) - header3, err := blockChain.GetHeaderByNumber(6791670) - Expect(err).ToNot(HaveOccurred()) - headerRepository.CreateOrUpdateHeader(header1) - headerID, err = headerRepository.CreateOrUpdateHeader(header2) - Expect(err).ToNot(HaveOccurred()) - headerRepository.CreateOrUpdateHeader(header3) - }) - - It("Transforms watched contract data into custom repositories", func() { - t := transformer.NewTransformer(test_helpers.TusdConfig, blockChain, db) - err = t.Init() - Expect(err).ToNot(HaveOccurred()) - err = t.Execute() - Expect(err).ToNot(HaveOccurred()) - - log := 
test_helpers.HeaderSyncTransferLog{} - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.transfer_event", tusdAddr)).StructScan(&log) - Expect(err).ToNot(HaveOccurred()) - // We don't know vulcID, so compare individual fields instead of complete structures - Expect(log.HeaderID).To(Equal(headerID)) - Expect(log.From).To(Equal("0x1062a747393198f70F71ec65A582423Dba7E5Ab3")) - Expect(log.To).To(Equal("0x2930096dB16b4A44Ecd4084EA4bd26F7EeF1AEf0")) - Expect(log.Value).To(Equal("9998940000000000000000")) - }) - - It("Keeps track of contract-related addresses while transforming event data if they need to be used for later method polling", func() { - var testConf config.ContractConfig - testConf = test_helpers.TusdConfig - testConf.Methods = map[string][]string{ - tusdAddr: {"balanceOf"}, - } - t := transformer.NewTransformer(testConf, blockChain, db) - err = t.Init() - Expect(err).ToNot(HaveOccurred()) - c, ok := t.Contracts[tusdAddr] - Expect(ok).To(Equal(true)) - err = t.Execute() - Expect(err).ToNot(HaveOccurred()) - Expect(len(c.EmittedAddrs)).To(Equal(4)) - Expect(len(c.EmittedHashes)).To(Equal(0)) - - b, ok := c.EmittedAddrs[common.HexToAddress("0x1062a747393198f70F71ec65A582423Dba7E5Ab3")] - Expect(ok).To(Equal(true)) - Expect(b).To(Equal(true)) - - b, ok = c.EmittedAddrs[common.HexToAddress("0x2930096dB16b4A44Ecd4084EA4bd26F7EeF1AEf0")] - Expect(ok).To(Equal(true)) - Expect(b).To(Equal(true)) - - b, ok = c.EmittedAddrs[common.HexToAddress("0x571A326f5B15E16917dC17761c340c1ec5d06f6d")] - Expect(ok).To(Equal(true)) - Expect(b).To(Equal(true)) - - b, ok = c.EmittedAddrs[common.HexToAddress("0xFBb1b73C4f0BDa4f67dcA266ce6Ef42f520fBB98")] - Expect(ok).To(Equal(true)) - Expect(b).To(Equal(true)) - - _, ok = c.EmittedAddrs[common.HexToAddress("0x09BbBBE21a5975cAc061D82f7b843b1234567890")] - Expect(ok).To(Equal(false)) - - _, ok = c.EmittedAddrs[common.HexToAddress("0x")] - Expect(ok).To(Equal(false)) - - _, ok = c.EmittedAddrs[""] - Expect(ok).To(Equal(false)) - - 
_, ok = c.EmittedAddrs[common.HexToAddress("0x09THISE21a5IS5cFAKE1D82fAND43bCE06MADEUP")] - Expect(ok).To(Equal(false)) - }) - - It("Polls given methods using generated token holder address", func() { - var testConf config.ContractConfig - testConf = test_helpers.TusdConfig - testConf.Methods = map[string][]string{ - tusdAddr: {"balanceOf"}, - } - t := transformer.NewTransformer(testConf, blockChain, db) - err = t.Init() - Expect(err).ToNot(HaveOccurred()) - err = t.Execute() - Expect(err).ToNot(HaveOccurred()) - - res := test_helpers.BalanceOf{} - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.balanceof_method WHERE who_ = '0x1062a747393198f70F71ec65A582423Dba7E5Ab3' AND block = '6791669'", tusdAddr)).StructScan(&res) - Expect(err).ToNot(HaveOccurred()) - Expect(res.Balance).To(Equal("55849938025000000000000")) - Expect(res.TokenName).To(Equal("TrueUSD")) - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.balanceof_method WHERE who_ = '0x09BbBBE21a5975cAc061D82f7b843b1234567890' AND block = '6791669'", tusdAddr)).StructScan(&res) - Expect(err).To(HaveOccurred()) - Expect(err.Error()).To(ContainSubstring("no rows in result set")) - }) - - It("Fails if initialization has not been done", func() { - t := transformer.NewTransformer(test_helpers.TusdConfig, blockChain, db) - err = t.Execute() - Expect(err).To(HaveOccurred()) - Expect(err.Error()).To(ContainSubstring("transformer has no initialized contracts")) - }) - }) - - Describe("Execute- against ENS registry contract", func() { - BeforeEach(func() { - header1, err := blockChain.GetHeaderByNumber(6885695) - Expect(err).ToNot(HaveOccurred()) - header2, err := blockChain.GetHeaderByNumber(6885696) - Expect(err).ToNot(HaveOccurred()) - header3, err := blockChain.GetHeaderByNumber(6885697) - Expect(err).ToNot(HaveOccurred()) - headerRepository.CreateOrUpdateHeader(header1) - headerID, err = headerRepository.CreateOrUpdateHeader(header2) - Expect(err).ToNot(HaveOccurred()) - 
headerRepository.CreateOrUpdateHeader(header3) - }) - - It("Transforms watched contract data into custom repositories", func() { - t := transformer.NewTransformer(test_helpers.ENSConfig, blockChain, db) - err = t.Init() - Expect(err).ToNot(HaveOccurred()) - err = t.Execute() - Expect(err).ToNot(HaveOccurred()) - Expect(t.Start).To(Equal(int64(6885698))) - - log := test_helpers.HeaderSyncNewOwnerLog{} - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.newowner_event", ensAddr)).StructScan(&log) - Expect(err).ToNot(HaveOccurred()) - // We don't know vulcID, so compare individual fields instead of complete structures - Expect(log.HeaderID).To(Equal(headerID)) - Expect(log.Node).To(Equal("0x93cdeb708b7545dc668eb9280176169d1c33cfd8ed6f04690a0bcc88a93fc4ae")) - Expect(log.Label).To(Equal("0x95832c7a47ff8a7840e28b78ce695797aaf402b1c186bad9eca28842625b5047")) - Expect(log.Owner).To(Equal("0x6090A6e47849629b7245Dfa1Ca21D94cd15878Ef")) - }) - - It("Keeps track of contract-related hashes while transforming event data if they need to be used for later method polling", func() { - var testConf config.ContractConfig - testConf = test_helpers.ENSConfig - testConf.Methods = map[string][]string{ - ensAddr: {"owner"}, - } - t := transformer.NewTransformer(testConf, blockChain, db) - err = t.Init() - Expect(err).ToNot(HaveOccurred()) - c, ok := t.Contracts[ensAddr] - Expect(ok).To(Equal(true)) - err = t.Execute() - Expect(err).ToNot(HaveOccurred()) - Expect(len(c.EmittedHashes)).To(Equal(2)) - Expect(len(c.EmittedAddrs)).To(Equal(0)) - - b, ok := c.EmittedHashes[common.HexToHash("0x93cdeb708b7545dc668eb9280176169d1c33cfd8ed6f04690a0bcc88a93fc4ae")] - Expect(ok).To(Equal(true)) - Expect(b).To(Equal(true)) - - b, ok = c.EmittedHashes[common.HexToHash("0x95832c7a47ff8a7840e28b78ce695797aaf402b1c186bad9eca28842625b5047")] - Expect(ok).To(Equal(true)) - Expect(b).To(Equal(true)) - - // Doesn't keep track of address since it wouldn't be used in calling the 'owner' method - _, ok = 
c.EmittedAddrs[common.HexToAddress("0x6090A6e47849629b7245Dfa1Ca21D94cd15878Ef")] - Expect(ok).To(Equal(false)) - }) - - It("Polls given method using list of collected hashes", func() { - var testConf config.ContractConfig - testConf = test_helpers.ENSConfig - testConf.Methods = map[string][]string{ - ensAddr: {"owner"}, - } - t := transformer.NewTransformer(testConf, blockChain, db) - err = t.Init() - Expect(err).ToNot(HaveOccurred()) - err = t.Execute() - Expect(err).ToNot(HaveOccurred()) - - res := test_helpers.Owner{} - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.owner_method WHERE node_ = '0x93cdeb708b7545dc668eb9280176169d1c33cfd8ed6f04690a0bcc88a93fc4ae' AND block = '6885696'", ensAddr)).StructScan(&res) - Expect(err).ToNot(HaveOccurred()) - Expect(res.Address).To(Equal("0x6090A6e47849629b7245Dfa1Ca21D94cd15878Ef")) - Expect(res.TokenName).To(Equal("")) - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.owner_method WHERE node_ = '0x95832c7a47ff8a7840e28b78ce695797aaf402b1c186bad9eca28842625b5047' AND block = '6885696'", ensAddr)).StructScan(&res) - Expect(err).ToNot(HaveOccurred()) - Expect(res.Address).To(Equal("0x0000000000000000000000000000000000000000")) - Expect(res.TokenName).To(Equal("")) - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.owner_method WHERE node_ = '0x9THIS110dcc444fIS242510c09bbAbe21aFAKEcacNODE82f7b843HASH61ba391' AND block = '6885696'", ensAddr)).StructScan(&res) - Expect(err).To(HaveOccurred()) - Expect(err.Error()).To(ContainSubstring("no rows in result set")) - }) - - It("It does not persist events if they do not pass the emitted arg filter", func() { - var testConf config.ContractConfig - testConf = test_helpers.ENSConfig - testConf.EventArgs = map[string][]string{ - ensAddr: {"fake_filter_value"}, - } - t := transformer.NewTransformer(testConf, blockChain, db) - err = t.Init() - Expect(err).ToNot(HaveOccurred()) - err = t.Execute() - Expect(err).ToNot(HaveOccurred()) - - log := 
test_helpers.HeaderSyncNewOwnerLog{} - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.newowner_event", ensAddr)).StructScan(&log) - Expect(err).To(HaveOccurred()) - Expect(err.Error()).To(ContainSubstring("does not exist")) - }) - - It("If a method arg filter is applied, only those arguments are used in polling", func() { - var testConf config.ContractConfig - testConf = test_helpers.ENSConfig - testConf.MethodArgs = map[string][]string{ - ensAddr: {"0x93cdeb708b7545dc668eb9280176169d1c33cfd8ed6f04690a0bcc88a93fc4ae"}, - } - testConf.Methods = map[string][]string{ - ensAddr: {"owner"}, - } - t := transformer.NewTransformer(testConf, blockChain, db) - err = t.Init() - Expect(err).ToNot(HaveOccurred()) - err = t.Execute() - Expect(err).ToNot(HaveOccurred()) - - res := test_helpers.Owner{} - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.owner_method WHERE node_ = '0x93cdeb708b7545dc668eb9280176169d1c33cfd8ed6f04690a0bcc88a93fc4ae' AND block = '6885696'", ensAddr)).StructScan(&res) - Expect(err).ToNot(HaveOccurred()) - Expect(res.Address).To(Equal("0x6090A6e47849629b7245Dfa1Ca21D94cd15878Ef")) - Expect(res.TokenName).To(Equal("")) - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.owner_method WHERE node_ = '0x95832c7a47ff8a7840e28b78ce695797aaf402b1c186bad9eca28842625b5047' AND block = '6885696'", ensAddr)).StructScan(&res) - Expect(err).To(HaveOccurred()) - Expect(err.Error()).To(ContainSubstring("no rows in result set")) - }) - }) - - Describe("Execute- against both ENS and TrueUSD", func() { - BeforeEach(func() { - for i := 6885692; i <= 6885701; i++ { - header, err := blockChain.GetHeaderByNumber(int64(i)) - Expect(err).ToNot(HaveOccurred()) - _, err = headerRepository.CreateOrUpdateHeader(header) - Expect(err).ToNot(HaveOccurred()) - } - }) - - It("Transforms watched contract data into custom repositories", func() { - t := transformer.NewTransformer(test_helpers.ENSandTusdConfig, blockChain, db) - err = t.Init() - 
Expect(err).ToNot(HaveOccurred()) - err = t.Execute() - Expect(err).ToNot(HaveOccurred()) - Expect(t.Start).To(Equal(int64(6885702))) - - newOwnerLog := test_helpers.HeaderSyncNewOwnerLog{} - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.newowner_event", ensAddr)).StructScan(&newOwnerLog) - Expect(err).ToNot(HaveOccurred()) - // We don't know vulcID, so compare individual fields instead of complete structures - Expect(newOwnerLog.Node).To(Equal("0x93cdeb708b7545dc668eb9280176169d1c33cfd8ed6f04690a0bcc88a93fc4ae")) - Expect(newOwnerLog.Label).To(Equal("0x95832c7a47ff8a7840e28b78ce695797aaf402b1c186bad9eca28842625b5047")) - Expect(newOwnerLog.Owner).To(Equal("0x6090A6e47849629b7245Dfa1Ca21D94cd15878Ef")) - - transferLog := test_helpers.HeaderSyncTransferLog{} - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.transfer_event", tusdAddr)).StructScan(&transferLog) - Expect(err).ToNot(HaveOccurred()) - // We don't know vulcID, so compare individual fields instead of complete structures - Expect(transferLog.From).To(Equal("0x8cA465764873E71CEa525F5EB6AE973d650c22C2")) - Expect(transferLog.To).To(Equal("0xc338482360651E5D30BEd77b7c85358cbBFB2E0e")) - Expect(transferLog.Value).To(Equal("2800000000000000000000")) - }) - - It("Marks header checked for a contract that has no logs at that header", func() { - t := transformer.NewTransformer(test_helpers.ENSandTusdConfig, blockChain, db) - err = t.Init() - Expect(err).ToNot(HaveOccurred()) - err = t.Execute() - Expect(err).ToNot(HaveOccurred()) - Expect(t.Start).To(Equal(int64(6885702))) - - newOwnerLog := test_helpers.HeaderSyncNewOwnerLog{} - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.newowner_event", ensAddr)).StructScan(&newOwnerLog) - Expect(err).ToNot(HaveOccurred()) - transferLog := test_helpers.HeaderSyncTransferLog{} - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.transfer_event", tusdAddr)).StructScan(&transferLog) - Expect(err).ToNot(HaveOccurred()) - 
Expect(transferLog.HeaderID).ToNot(Equal(newOwnerLog.HeaderID)) - - type checkedHeader struct { - ID int64 `db:"id"` - HeaderID int64 `db:"header_id"` - NewOwner int64 `db:"newowner_0x314159265dd8dbb310642f98f50c066173c1259b"` - Transfer int64 `db:"transfer_0x8dd5fbce2f6a956c3022ba3663759011dd51e73e"` - } - - transferCheckedHeader := new(checkedHeader) - err = db.QueryRowx("SELECT * FROM public.checked_headers WHERE header_id = $1", transferLog.HeaderID).StructScan(transferCheckedHeader) - Expect(err).ToNot(HaveOccurred()) - Expect(transferCheckedHeader.Transfer).To(Equal(int64(1))) - Expect(transferCheckedHeader.NewOwner).To(Equal(int64(1))) - - newOwnerCheckedHeader := new(checkedHeader) - err = db.QueryRowx("SELECT * FROM public.checked_headers WHERE header_id = $1", newOwnerLog.HeaderID).StructScan(newOwnerCheckedHeader) - Expect(err).ToNot(HaveOccurred()) - Expect(newOwnerCheckedHeader.NewOwner).To(Equal(int64(1))) - Expect(newOwnerCheckedHeader.Transfer).To(Equal(int64(1))) - }) - - It("Keeps track of contract-related hashes and addresses while transforming event data if they need to be used for later method polling", func() { - var testConf config.ContractConfig - testConf = test_helpers.ENSandTusdConfig - testConf.Methods = map[string][]string{ - ensAddr: {"owner"}, - tusdAddr: {"balanceOf"}, - } - t := transformer.NewTransformer(testConf, blockChain, db) - err = t.Init() - Expect(err).ToNot(HaveOccurred()) - ens, ok := t.Contracts[ensAddr] - Expect(ok).To(Equal(true)) - tusd, ok := t.Contracts[tusdAddr] - Expect(ok).To(Equal(true)) - err = t.Execute() - Expect(err).ToNot(HaveOccurred()) - Expect(len(ens.EmittedHashes)).To(Equal(2)) - Expect(len(ens.EmittedAddrs)).To(Equal(0)) - Expect(len(tusd.EmittedAddrs)).To(Equal(2)) - Expect(len(tusd.EmittedHashes)).To(Equal(0)) - - b, ok := ens.EmittedHashes[common.HexToHash("0x93cdeb708b7545dc668eb9280176169d1c33cfd8ed6f04690a0bcc88a93fc4ae")] - Expect(ok).To(Equal(true)) - Expect(b).To(Equal(true)) - - b, ok = 
ens.EmittedHashes[common.HexToHash("0x95832c7a47ff8a7840e28b78ce695797aaf402b1c186bad9eca28842625b5047")] - Expect(ok).To(Equal(true)) - Expect(b).To(Equal(true)) - - b, ok = tusd.EmittedAddrs[common.HexToAddress("0x8cA465764873E71CEa525F5EB6AE973d650c22C2")] - Expect(ok).To(Equal(true)) - Expect(b).To(Equal(true)) - - b, ok = tusd.EmittedAddrs[common.HexToAddress("0xc338482360651E5D30BEd77b7c85358cbBFB2E0e")] - Expect(ok).To(Equal(true)) - Expect(b).To(Equal(true)) - - _, ok = tusd.EmittedAddrs[common.HexToAddress("0x6090A6e47849629b7245Dfa1Ca21D94cd15878Ef")] - Expect(ok).To(Equal(false)) - }) - - It("Polls given methods for each contract, using list of collected values", func() { - var testConf config.ContractConfig - testConf = test_helpers.ENSandTusdConfig - testConf.Methods = map[string][]string{ - ensAddr: {"owner"}, - tusdAddr: {"balanceOf"}, - } - t := transformer.NewTransformer(testConf, blockChain, db) - err = t.Init() - Expect(err).ToNot(HaveOccurred()) - err = t.Execute() - Expect(err).ToNot(HaveOccurred()) - - owner := test_helpers.Owner{} - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.owner_method WHERE node_ = '0x93cdeb708b7545dc668eb9280176169d1c33cfd8ed6f04690a0bcc88a93fc4ae' AND block = '6885696'", ensAddr)).StructScan(&owner) - Expect(err).ToNot(HaveOccurred()) - Expect(owner.Address).To(Equal("0x6090A6e47849629b7245Dfa1Ca21D94cd15878Ef")) - Expect(owner.TokenName).To(Equal("")) - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.owner_method WHERE node_ = '0x95832c7a47ff8a7840e28b78ce695797aaf402b1c186bad9eca28842625b5047' AND block = '6885696'", ensAddr)).StructScan(&owner) - Expect(err).ToNot(HaveOccurred()) - Expect(owner.Address).To(Equal("0x0000000000000000000000000000000000000000")) - Expect(owner.TokenName).To(Equal("")) - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.owner_method WHERE node_ = '0x95832c7a47ff8a7840e28b78ceMADEUPaaf4HASHc186badTHItransformers.8IS625bFAKE' AND block = '6885696'", 
ensAddr)).StructScan(&owner) - Expect(err).To(HaveOccurred()) - Expect(err.Error()).To(ContainSubstring("no rows in result set")) - - bal := test_helpers.BalanceOf{} - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.balanceof_method WHERE who_ = '0x8cA465764873E71CEa525F5EB6AE973d650c22C2' AND block = '6885701'", tusdAddr)).StructScan(&bal) - Expect(err).ToNot(HaveOccurred()) - Expect(bal.Balance).To(Equal("1954436000000000000000")) - Expect(bal.TokenName).To(Equal("TrueUSD")) - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.balanceof_method WHERE who_ = '0x09BbBBE21a5975cAc061D82f7b843b1234567890' AND block = '6885701'", tusdAddr)).StructScan(&bal) - Expect(err).To(HaveOccurred()) - Expect(err.Error()).To(ContainSubstring("no rows in result set")) - }) - }) -}) diff --git a/integration_test/geth_blockchain_test.go b/integration_test/geth_blockchain_test.go deleted file mode 100644 index 3d4582c9..00000000 --- a/integration_test/geth_blockchain_test.go +++ /dev/null @@ -1,121 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package integration_test - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/ethclient" - "github.com/ethereum/go-ethereum/rpc" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth" - "github.com/vulcanize/vulcanizedb/pkg/eth/client" - rpc2 "github.com/vulcanize/vulcanizedb/pkg/eth/converters/rpc" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/node" - "github.com/vulcanize/vulcanizedb/test_config" -) - -var _ = Describe("Reading from the Geth blockchain", func() { - var blockChain *eth.BlockChain - - BeforeEach(func() { - rawRPCClient, err := rpc.Dial(test_config.TestClient.IPCPath) - Expect(err).NotTo(HaveOccurred()) - rpcClient := client.NewRPCClient(rawRPCClient, test_config.TestClient.IPCPath) - ethClient := ethclient.NewClient(rawRPCClient) - blockChainClient := client.NewEthClient(ethClient) - node := node.MakeNode(rpcClient) - transactionConverter := rpc2.NewRPCTransactionConverter(ethClient) - blockChain = eth.NewBlockChain(blockChainClient, rpcClient, node, transactionConverter) - }) - - It("retrieves the genesis block and first block", func(done Done) { - genesisBlock, err := blockChain.GetBlockByNumber(int64(0)) - Expect(err).ToNot(HaveOccurred()) - firstBlock, err := blockChain.GetBlockByNumber(int64(1)) - Expect(err).ToNot(HaveOccurred()) - lastBlockNumber, err := blockChain.LastBlock() - - Expect(err).NotTo(HaveOccurred()) - Expect(genesisBlock.Number).To(Equal(int64(0))) - Expect(firstBlock.Number).To(Equal(int64(1))) - Expect(lastBlockNumber.Int64()).To(BeNumerically(">", 0)) - close(done) - }, 15) - - It("retrieves the node info", func(done Done) { - node := blockChain.Node() - - Expect(node.GenesisBlock).ToNot(BeNil()) - Expect(node.NetworkID).To(Equal("1.000000")) - Expect(len(node.ID)).ToNot(BeZero()) - Expect(node.ClientName).ToNot(BeZero()) - - close(done) - }, 15) - - It("retrieves transaction", func() { - // actual transaction: https://etherscan.io/tx/0x44d462f2a19ad267e276b234a62c542fc91c974d2e4754a325ca405f95440255 - txHash := 
common.HexToHash("0x44d462f2a19ad267e276b234a62c542fc91c974d2e4754a325ca405f95440255") - transactions, err := blockChain.GetTransactions([]common.Hash{txHash}) - - Expect(err).NotTo(HaveOccurred()) - Expect(len(transactions)).To(Equal(1)) - expectedData := []byte{149, 227, 197, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 1, 160, 85, 105, 13, 157, 184, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, - 241, 202, 218, 90, 30, 178, 234, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 92, 155, 193, 43} - expectedRaw := []byte{248, 201, 9, 132, 59, 154, 202, 0, 131, 1, 102, 93, 148, 44, 75, 208, 100, 185, 152, 131, - 128, 118, 250, 52, 26, 131, 208, 7, 252, 47, 165, 9, 87, 128, 184, 100, 149, 227, 197, 11, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 160, 85, 105, 13, 157, 184, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 241, 202, 218, 90, 30, 178, 234, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 92, 155, 193, 43, 37, 160, 237, 184, 236, 248, 23, 152, - 53, 238, 44, 215, 181, 234, 229, 157, 246, 212, 178, 88, 25, 116, 134, 163, 124, 64, 2, 66, 25, 118, 1, 253, 27, - 101, 160, 36, 226, 116, 43, 147, 236, 124, 76, 227, 250, 228, 168, 22, 19, 248, 155, 248, 151, 219, 14, 1, 186, - 159, 35, 154, 22, 222, 123, 254, 147, 63, 221} - expectedModel := core.TransactionModel{ - Data: expectedData, - From: "0x3b08b99441086edd66f36f9f9aee733280698378", - GasLimit: 91741, - GasPrice: 1000000000, - Hash: "0x44d462f2a19ad267e276b234a62c542fc91c974d2e4754a325ca405f95440255", - Nonce: 9, - Raw: expectedRaw, - Receipt: core.Receipt{}, - To: "0x2c4bd064b998838076fa341a83d007fc2fa50957", - TxIndex: 30, - Value: "0", - } - Expect(transactions[0]).To(Equal(expectedModel)) - }) - - //Benchmarking test: remove skip to test performance of block retrieval - XMeasure("retrieving n blocks", 
func(b Benchmarker) { - b.Time("runtime", func() { - var blocks []core.Block - n := 10 - for i := 5327459; i > 5327459-n; i-- { - block, err := blockChain.GetBlockByNumber(int64(i)) - Expect(err).ToNot(HaveOccurred()) - blocks = append(blocks, block) - } - Expect(len(blocks)).To(Equal(n)) - }) - }, 10) -}) diff --git a/integration_test/getter_test.go b/integration_test/getter_test.go deleted file mode 100644 index a5c05986..00000000 --- a/integration_test/getter_test.go +++ /dev/null @@ -1,57 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package integration_test - -import ( - "github.com/ethereum/go-ethereum/ethclient" - "github.com/ethereum/go-ethereum/rpc" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth" - "github.com/vulcanize/vulcanizedb/pkg/eth/client" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/constants" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/getter" - rpc2 "github.com/vulcanize/vulcanizedb/pkg/eth/converters/rpc" - "github.com/vulcanize/vulcanizedb/pkg/eth/node" - "github.com/vulcanize/vulcanizedb/test_config" -) - -var _ = Describe("Interface Getter", func() { - Describe("GetAbi", func() { - It("Constructs and returns a custom abi based on results from supportsInterface calls", func() { - expectedABI := `[` + constants.AddrChangeInterface + `,` + constants.NameChangeInterface + `,` + constants.ContentChangeInterface + `,` + constants.AbiChangeInterface + `,` + constants.PubkeyChangeInterface + `]` - con := test_config.TestClient - testIPC := con.IPCPath - blockNumber := int64(6885696) - rawRpcClient, err := rpc.Dial(testIPC) - Expect(err).NotTo(HaveOccurred()) - rpcClient := client.NewRPCClient(rawRpcClient, testIPC) - ethClient := ethclient.NewClient(rawRpcClient) - blockChainClient := client.NewEthClient(ethClient) - node := node.MakeNode(rpcClient) - transactionConverter := rpc2.NewRPCTransactionConverter(ethClient) - blockChain := eth.NewBlockChain(blockChainClient, rpcClient, node, transactionConverter) - interfaceGetter := getter.NewInterfaceGetter(blockChain) - abi, err := interfaceGetter.GetABI(constants.PublicResolverAddress, blockNumber) - Expect(err).NotTo(HaveOccurred()) - Expect(abi).To(Equal(expectedABI)) - _, err = eth.ParseAbi(abi) - Expect(err).ToNot(HaveOccurred()) - }) - }) -}) diff --git a/integration_test/poller_test.go b/integration_test/poller_test.go deleted file mode 100644 index a1ea02bc..00000000 --- a/integration_test/poller_test.go +++ /dev/null @@ -1,252 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms 
of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package integration_test - -import ( - "fmt" - - "github.com/ethereum/go-ethereum/common" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/constants" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/poller" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -var _ = Describe("Poller", func() { - - var contractPoller poller.Poller - var con *contract.Contract - var db *postgres.DB - var bc core.BlockChain - - AfterEach(func() { - test_helpers.TearDown(db) - }) - - Describe("Full sync mode", func() { - BeforeEach(func() { - db, bc = test_helpers.SetupDBandBC() - contractPoller = poller.NewPoller(bc, db, types.FullSync) - }) - - Describe("PollContract", func() { - It("Polls specified contract methods using contract's argument list", func() { - con = test_helpers.SetupTusdContract(nil, []string{"balanceOf"}) - Expect(con.Abi).To(Equal(constants.TusdAbiString)) - con.StartingBlock = 6707322 - con.AddEmittedAddr(common.HexToAddress("0xfE9e8709d3215310075d67E3ed32A380CCf451C8"), common.HexToAddress("0x3f5CE5FBFe3E9af3971dD833D26bA9b5C936f0bE")) 
- - err := contractPoller.PollContract(*con, 6707323) - Expect(err).ToNot(HaveOccurred()) - - scanStruct := test_helpers.BalanceOf{} - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.balanceof_method WHERE who_ = '0xfE9e8709d3215310075d67E3ed32A380CCf451C8' AND block = '6707322'", constants.TusdContractAddress)).StructScan(&scanStruct) - Expect(err).ToNot(HaveOccurred()) - Expect(scanStruct.Balance).To(Equal("66386309548896882859581786")) - Expect(scanStruct.TokenName).To(Equal("TrueUSD")) - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.balanceof_method WHERE who_ = '0xfE9e8709d3215310075d67E3ed32A380CCf451C8' AND block = '6707323'", constants.TusdContractAddress)).StructScan(&scanStruct) - Expect(err).ToNot(HaveOccurred()) - Expect(scanStruct.Balance).To(Equal("66386309548896882859581786")) - Expect(scanStruct.TokenName).To(Equal("TrueUSD")) - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.balanceof_method WHERE who_ = '0x3f5CE5FBFe3E9af3971dD833D26bA9b5C936f0bE' AND block = '6707322'", constants.TusdContractAddress)).StructScan(&scanStruct) - Expect(err).ToNot(HaveOccurred()) - Expect(scanStruct.Balance).To(Equal("17982350181394112023885864")) - Expect(scanStruct.TokenName).To(Equal("TrueUSD")) - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.balanceof_method WHERE who_ = '0x3f5CE5FBFe3E9af3971dD833D26bA9b5C936f0bE' AND block = '6707323'", constants.TusdContractAddress)).StructScan(&scanStruct) - Expect(err).ToNot(HaveOccurred()) - Expect(scanStruct.Balance).To(Equal("17982350181394112023885864")) - Expect(scanStruct.TokenName).To(Equal("TrueUSD")) - }) - - It("Polls specified contract methods using contract's hash list", func() { - con = test_helpers.SetupENSContract(nil, []string{"owner"}) - Expect(con.Abi).To(Equal(constants.ENSAbiString)) - Expect(len(con.Methods)).To(Equal(1)) - con.AddEmittedHash(common.HexToHash("0x93cdeb708b7545dc668eb9280176169d1c33cfd8ed6f04690a0bcc88a93fc4ae"), 
common.HexToHash("0x7e74a86b6e146964fb965db04dc2590516da77f720bb6759337bf5632415fd86")) - - err := contractPoller.PollContractAt(*con, 6885877) - Expect(err).ToNot(HaveOccurred()) - - scanStruct := test_helpers.Owner{} - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.owner_method WHERE node_ = '0x7e74a86b6e146964fb965db04dc2590516da77f720bb6759337bf5632415fd86' AND block = '6885877'", constants.EnsContractAddress)).StructScan(&scanStruct) - Expect(err).ToNot(HaveOccurred()) - Expect(scanStruct.Address).To(Equal("0x546aA2EaE2514494EeaDb7bbb35243348983C59d")) - Expect(scanStruct.TokenName).To(Equal("ENS-Registry")) - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.owner_method WHERE node_ = '0x93cdeb708b7545dc668eb9280176169d1c33cfd8ed6f04690a0bcc88a93fc4ae' AND block = '6885877'", constants.EnsContractAddress)).StructScan(&scanStruct) - Expect(err).ToNot(HaveOccurred()) - Expect(scanStruct.Address).To(Equal("0x6090A6e47849629b7245Dfa1Ca21D94cd15878Ef")) - Expect(scanStruct.TokenName).To(Equal("ENS-Registry")) - }) - - It("Does not poll and persist any methods if none are specified", func() { - con = test_helpers.SetupTusdContract(nil, nil) - Expect(con.Abi).To(Equal(constants.TusdAbiString)) - con.StartingBlock = 6707322 - con.AddEmittedAddr(common.HexToAddress("0xfE9e8709d3215310075d67E3ed32A380CCf451C8"), common.HexToAddress("0x3f5CE5FBFe3E9af3971dD833D26bA9b5C936f0bE")) - - err := contractPoller.PollContract(*con, 6707323) - Expect(err).ToNot(HaveOccurred()) - - scanStruct := test_helpers.BalanceOf{} - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.balanceof_method WHERE who_ = '0xfE9e8709d3215310075d67E3ed32A380CCf451C8' AND block = '6707322'", constants.TusdContractAddress)).StructScan(&scanStruct) - Expect(err).To(HaveOccurred()) - }) - }) - - Describe("FetchContractData", func() { - It("Calls a single contract method", func() { - var name = new(string) - err := contractPoller.FetchContractData(constants.TusdAbiString, 
constants.TusdContractAddress, "name", nil, &name, 6197514) - Expect(err).ToNot(HaveOccurred()) - Expect(*name).To(Equal("TrueUSD")) - }) - }) - }) - - Describe("Header sync mode", func() { - BeforeEach(func() { - db, bc = test_helpers.SetupDBandBC() - contractPoller = poller.NewPoller(bc, db, types.HeaderSync) - }) - - Describe("PollContract", func() { - It("Polls specified contract methods using contract's token holder address list", func() { - con = test_helpers.SetupTusdContract(nil, []string{"balanceOf"}) - Expect(con.Abi).To(Equal(constants.TusdAbiString)) - con.StartingBlock = 6707322 - con.AddEmittedAddr(common.HexToAddress("0xfE9e8709d3215310075d67E3ed32A380CCf451C8"), common.HexToAddress("0x3f5CE5FBFe3E9af3971dD833D26bA9b5C936f0bE")) - - err := contractPoller.PollContract(*con, 6707323) - Expect(err).ToNot(HaveOccurred()) - - scanStruct := test_helpers.BalanceOf{} - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.balanceof_method WHERE who_ = '0xfE9e8709d3215310075d67E3ed32A380CCf451C8' AND block = '6707322'", constants.TusdContractAddress)).StructScan(&scanStruct) - Expect(err).ToNot(HaveOccurred()) - Expect(scanStruct.Balance).To(Equal("66386309548896882859581786")) - Expect(scanStruct.TokenName).To(Equal("TrueUSD")) - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.balanceof_method WHERE who_ = '0xfE9e8709d3215310075d67E3ed32A380CCf451C8' AND block = '6707323'", constants.TusdContractAddress)).StructScan(&scanStruct) - Expect(err).ToNot(HaveOccurred()) - Expect(scanStruct.Balance).To(Equal("66386309548896882859581786")) - Expect(scanStruct.TokenName).To(Equal("TrueUSD")) - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.balanceof_method WHERE who_ = '0x3f5CE5FBFe3E9af3971dD833D26bA9b5C936f0bE' AND block = '6707322'", constants.TusdContractAddress)).StructScan(&scanStruct) - Expect(err).ToNot(HaveOccurred()) - Expect(scanStruct.Balance).To(Equal("17982350181394112023885864")) - 
Expect(scanStruct.TokenName).To(Equal("TrueUSD")) - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.balanceof_method WHERE who_ = '0x3f5CE5FBFe3E9af3971dD833D26bA9b5C936f0bE' AND block = '6707323'", constants.TusdContractAddress)).StructScan(&scanStruct) - Expect(err).ToNot(HaveOccurred()) - Expect(scanStruct.Balance).To(Equal("17982350181394112023885864")) - Expect(scanStruct.TokenName).To(Equal("TrueUSD")) - }) - - It("Polls specified contract methods using contract's hash list", func() { - con = test_helpers.SetupENSContract(nil, []string{"owner"}) - Expect(con.Abi).To(Equal(constants.ENSAbiString)) - Expect(len(con.Methods)).To(Equal(1)) - con.AddEmittedHash(common.HexToHash("0x93cdeb708b7545dc668eb9280176169d1c33cfd8ed6f04690a0bcc88a93fc4ae"), common.HexToHash("0x7e74a86b6e146964fb965db04dc2590516da77f720bb6759337bf5632415fd86")) - - err := contractPoller.PollContractAt(*con, 6885877) - Expect(err).ToNot(HaveOccurred()) - - scanStruct := test_helpers.Owner{} - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.owner_method WHERE node_ = '0x7e74a86b6e146964fb965db04dc2590516da77f720bb6759337bf5632415fd86' AND block = '6885877'", constants.EnsContractAddress)).StructScan(&scanStruct) - Expect(err).ToNot(HaveOccurred()) - Expect(scanStruct.Address).To(Equal("0x546aA2EaE2514494EeaDb7bbb35243348983C59d")) - Expect(scanStruct.TokenName).To(Equal("ENS-Registry")) - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.owner_method WHERE node_ = '0x93cdeb708b7545dc668eb9280176169d1c33cfd8ed6f04690a0bcc88a93fc4ae' AND block = '6885877'", constants.EnsContractAddress)).StructScan(&scanStruct) - Expect(err).ToNot(HaveOccurred()) - Expect(scanStruct.Address).To(Equal("0x6090A6e47849629b7245Dfa1Ca21D94cd15878Ef")) - Expect(scanStruct.TokenName).To(Equal("ENS-Registry")) - }) - - It("Does not poll and persist any methods if none are specified", func() { - con = test_helpers.SetupTusdContract(nil, nil) - Expect(con.Abi).To(Equal(constants.TusdAbiString)) 
- con.StartingBlock = 6707322 - con.AddEmittedAddr(common.HexToAddress("0xfE9e8709d3215310075d67E3ed32A380CCf451C8"), common.HexToAddress("0x3f5CE5FBFe3E9af3971dD833D26bA9b5C936f0bE")) - - err := contractPoller.PollContract(*con, 6707323) - Expect(err).ToNot(HaveOccurred()) - - scanStruct := test_helpers.BalanceOf{} - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.balanceof_method WHERE who_ = '0xfE9e8709d3215310075d67E3ed32A380CCf451C8' AND block = '6707322'", constants.TusdContractAddress)).StructScan(&scanStruct) - Expect(err).To(HaveOccurred()) - }) - - It("Caches returned values of the appropriate types for downstream method polling if method piping is turned on", func() { - con = test_helpers.SetupENSContract(nil, []string{"resolver"}) - Expect(con.Abi).To(Equal(constants.ENSAbiString)) - con.StartingBlock = 6921967 - con.EmittedAddrs = map[interface{}]bool{} - con.Piping = false - con.AddEmittedHash(common.HexToHash("0x495b6e6efdedb750aa519919b5cf282bdaa86067b82a2293a3ff5723527141e8")) - err := contractPoller.PollContract(*con, 6921968) - Expect(err).ToNot(HaveOccurred()) - - scanStruct := test_helpers.Resolver{} - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.resolver_method WHERE node_ = '0x495b6e6efdedb750aa519919b5cf282bdaa86067b82a2293a3ff5723527141e8' AND block = '6921967'", constants.EnsContractAddress)).StructScan(&scanStruct) - Expect(err).ToNot(HaveOccurred()) - Expect(scanStruct.Address).To(Equal("0x5FfC014343cd971B7eb70732021E26C35B744cc4")) - Expect(scanStruct.TokenName).To(Equal("ENS-Registry")) - Expect(len(con.EmittedAddrs)).To(Equal(0)) // With piping off the address is not saved - - test_helpers.TearDown(db) - db, bc = test_helpers.SetupDBandBC() - contractPoller = poller.NewPoller(bc, db, types.HeaderSync) - - con.Piping = true - err = contractPoller.PollContract(*con, 6921968) - Expect(err).ToNot(HaveOccurred()) - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.resolver_method WHERE node_ = 
'0x495b6e6efdedb750aa519919b5cf282bdaa86067b82a2293a3ff5723527141e8' AND block = '6921967'", constants.EnsContractAddress)).StructScan(&scanStruct) - Expect(err).ToNot(HaveOccurred()) - Expect(scanStruct.Address).To(Equal("0x5FfC014343cd971B7eb70732021E26C35B744cc4")) - Expect(scanStruct.TokenName).To(Equal("ENS-Registry")) - Expect(len(con.EmittedAddrs)).To(Equal(1)) // With piping on it is saved - Expect(con.EmittedAddrs[common.HexToAddress("0x5FfC014343cd971B7eb70732021E26C35B744cc4")]).To(Equal(true)) - }) - }) - - Describe("FetchContractData", func() { - It("Calls a single contract method", func() { - var name = new(string) - err := contractPoller.FetchContractData(constants.TusdAbiString, constants.TusdContractAddress, "name", nil, &name, 6197514) - Expect(err).ToNot(HaveOccurred()) - Expect(*name).To(Equal("TrueUSD")) - }) - }) - }) -}) diff --git a/libraries/shared/README.md b/libraries/shared/README.md deleted file mode 100644 index 8a97476d..00000000 --- a/libraries/shared/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# Shared Tools - -## Description -Code that is useful for or used by plugins written on top of VulcanizeDB. - -## Note -Much code in this directory may not be used outside of the tests, but don't delete it - it could be used by a plugin. -Renaming and/or deleting functions in this namespace requires a version bump to avoid breaking plugins. \ No newline at end of file diff --git a/libraries/shared/chunker/chunker_suite_test.go b/libraries/shared/chunker/chunker_suite_test.go deleted file mode 100644 index 6e7dd126..00000000 --- a/libraries/shared/chunker/chunker_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package chunker_test - -import ( - "io/ioutil" - "testing" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - log "github.com/sirupsen/logrus" -) - -func TestFactories(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Shared Chunker Suite") -} - -var _ = BeforeSuite(func() { - log.SetOutput(ioutil.Discard) -}) diff --git a/libraries/shared/chunker/log_chunker.go b/libraries/shared/chunker/log_chunker.go deleted file mode 100644 index e1042e8b..00000000 --- a/libraries/shared/chunker/log_chunker.go +++ /dev/null @@ -1,69 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package chunker - -import ( - "strings" - - "github.com/ethereum/go-ethereum/common" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type Chunker interface { - AddConfig(transformerConfig transformer.EventTransformerConfig) - ChunkLogs(logs []core.HeaderSyncLog) map[string][]core.HeaderSyncLog -} - -type LogChunker struct { - AddressToNames map[string][]string - NameToTopic0 map[string]common.Hash -} - -// Returns a new log chunker with initialised maps. -// Needs to have configs added with `AddConfigs` to consider logs for the respective transformer. -func NewLogChunker() *LogChunker { - return &LogChunker{ - AddressToNames: map[string][]string{}, - NameToTopic0: map[string]common.Hash{}, - } -} - -// Configures the chunker by adding one config with more addresses and topics to consider. -func (chunker *LogChunker) AddConfig(transformerConfig transformer.EventTransformerConfig) { - for _, address := range transformerConfig.ContractAddresses { - var lowerCaseAddress = strings.ToLower(address) - chunker.AddressToNames[lowerCaseAddress] = append(chunker.AddressToNames[lowerCaseAddress], transformerConfig.TransformerName) - chunker.NameToTopic0[transformerConfig.TransformerName] = common.HexToHash(transformerConfig.Topic) - } -} - -// Goes through a slice of logs, associating relevant logs (matching addresses and topic) with transformers -func (chunker *LogChunker) ChunkLogs(logs []core.HeaderSyncLog) map[string][]core.HeaderSyncLog { - chunks := map[string][]core.HeaderSyncLog{} - for _, log := range logs { - // Topic0 is not unique to each transformer, also need to consider the contract address - relevantTransformers := chunker.AddressToNames[strings.ToLower(log.Log.Address.Hex())] - - for _, t := range relevantTransformers { - if chunker.NameToTopic0[t] == log.Log.Topics[0] { - chunks[t] = append(chunks[t], log) - } - } - } - return chunks -} diff --git 
a/libraries/shared/chunker/log_chunker_test.go b/libraries/shared/chunker/log_chunker_test.go deleted file mode 100644 index 011bc7ed..00000000 --- a/libraries/shared/chunker/log_chunker_test.go +++ /dev/null @@ -1,163 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package chunker_test - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - chunk "github.com/vulcanize/vulcanizedb/libraries/shared/chunker" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -var _ = Describe("Log chunker", func() { - var ( - chunker *chunk.LogChunker - ) - - BeforeEach(func() { - chunker = chunk.NewLogChunker() - - configA := transformer.EventTransformerConfig{ - TransformerName: "TransformerA", - ContractAddresses: []string{"0x00000000000000000000000000000000000000A1", "0x00000000000000000000000000000000000000A2"}, - Topic: "0xA", - } - chunker.AddConfig(configA) - - configB := transformer.EventTransformerConfig{ - TransformerName: "TransformerB", - ContractAddresses: []string{"0x00000000000000000000000000000000000000B1"}, - Topic: "0xB", - } - chunker.AddConfig(configB) - - configC := transformer.EventTransformerConfig{ - TransformerName: "TransformerC", - ContractAddresses: []string{"0x00000000000000000000000000000000000000A2"}, - Topic: "0xC", - } - chunker.AddConfig(configC) - }) - - Describe("initialisation", func() { - It("creates lookup maps correctly", func() { - Expect(chunker.AddressToNames).To(Equal(map[string][]string{ - "0x00000000000000000000000000000000000000a1": {"TransformerA"}, - "0x00000000000000000000000000000000000000a2": {"TransformerA", "TransformerC"}, - "0x00000000000000000000000000000000000000b1": {"TransformerB"}, - })) - - Expect(chunker.NameToTopic0).To(Equal(map[string]common.Hash{ - "TransformerA": common.HexToHash("0xA"), - "TransformerB": common.HexToHash("0xB"), - "TransformerC": common.HexToHash("0xC"), - })) - }) - }) - - Describe("AddConfig", func() { - It("can add more configs later", func() { - configD := transformer.EventTransformerConfig{ - TransformerName: "TransformerD", - ContractAddresses: []string{"0x000000000000000000000000000000000000000D"}, - Topic: "0xD", - } - chunker.AddConfig(configD) - - Expect(chunker.AddressToNames).To(ContainElement([]string{"TransformerD"})) - 
Expect(chunker.NameToTopic0).To(ContainElement(common.HexToHash("0xD"))) - }) - - It("lower cases address", func() { - configD := transformer.EventTransformerConfig{ - TransformerName: "TransformerD", - ContractAddresses: []string{"0x000000000000000000000000000000000000000D"}, - Topic: "0xD", - } - chunker.AddConfig(configD) - - Expect(chunker.AddressToNames["0x000000000000000000000000000000000000000d"]).To(Equal([]string{"TransformerD"})) - }) - }) - - Describe("ChunkLogs", func() { - It("only associates logs with relevant topic0 and address to transformers", func() { - logs := []core.HeaderSyncLog{log1, log2, log3, log4, log5} - chunks := chunker.ChunkLogs(logs) - - Expect(chunks["TransformerA"]).To(And(ContainElement(log1), ContainElement(log4))) - Expect(chunks["TransformerB"]).To(BeEmpty()) - Expect(chunks["TransformerC"]).To(ContainElement(log5)) - }) - }) -}) - -var ( - // Match TransformerA - log1 = core.HeaderSyncLog{ - Log: types.Log{ - Address: common.HexToAddress("0xA1"), - Topics: []common.Hash{ - common.HexToHash("0xA"), - common.HexToHash("0xLogTopic1"), - }, - }, - } - // Match TransformerA address, but not topic0 - log2 = core.HeaderSyncLog{ - Log: types.Log{ - Address: common.HexToAddress("0xA1"), - Topics: []common.Hash{ - common.HexToHash("0xB"), - common.HexToHash("0xLogTopic2"), - }, - }, - } - // Match TransformerA topic, but TransformerB address - log3 = core.HeaderSyncLog{ - Log: types.Log{ - Address: common.HexToAddress("0xB1"), - Topics: []common.Hash{ - common.HexToHash("0xA"), - common.HexToHash("0xLogTopic3"), - }, - }, - } - // Match TransformerA, with the other address - log4 = core.HeaderSyncLog{ - Log: types.Log{ - Address: common.HexToAddress("0xA2"), - Topics: []common.Hash{ - common.HexToHash("0xA"), - common.HexToHash("0xLogTopic4"), - }, - }, - } - // Match TransformerC, which shares address with TransformerA - log5 = core.HeaderSyncLog{ - Log: types.Log{ - Address: common.HexToAddress("0xA2"), - Topics: []common.Hash{ - 
common.HexToHash("0xC"), - common.HexToHash("0xLogTopic5"), - }, - }, - } -) diff --git a/libraries/shared/constants/checked_headers.go b/libraries/shared/constants/checked_headers.go deleted file mode 100644 index eea1214d..00000000 --- a/libraries/shared/constants/checked_headers.go +++ /dev/null @@ -1,25 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package constants - -type TransformerExecution bool - -const ( - HeaderRecheck TransformerExecution = true - HeaderUnchecked TransformerExecution = false - RecheckHeaderCap = int64(5) -) diff --git a/libraries/shared/constants/data.go b/libraries/shared/constants/data.go deleted file mode 100644 index f4803592..00000000 --- a/libraries/shared/constants/data.go +++ /dev/null @@ -1,19 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package constants - -var DataItemLength = 32 diff --git a/libraries/shared/constants/external.go b/libraries/shared/constants/external.go deleted file mode 100644 index 156d13c7..00000000 --- a/libraries/shared/constants/external.go +++ /dev/null @@ -1,80 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package constants - -import ( - "fmt" - "math" - - log "github.com/sirupsen/logrus" - "github.com/spf13/viper" -) - -var initialized = false - -func initConfig() { - if initialized { - return - } - - if err := viper.ReadInConfig(); err == nil { - log.Info("Using config file:", viper.ConfigFileUsed()) - } else { - panic(fmt.Sprintf("Could not find environment file: %v", err)) - } - initialized = true -} - -// GetMinDeploymentBlock gets the minimum deployment block for multiple contracts from config -func GetMinDeploymentBlock() uint64 { - initConfig() - contractNames := getContractNames() - if len(contractNames) < 1 { - log.Fatalf("No contracts supplied") - } - minBlock := uint64(math.MaxUint64) - for c := range contractNames { - deployed := getDeploymentBlock(c) - if deployed < minBlock { - minBlock = deployed - } - } - return minBlock -} - -func getContractNames() map[string]bool { - transformerNames := viper.GetStringSlice("exporter.transformerNames") - contractNames := make(map[string]bool) - for _, transformerName := range transformerNames { - configKey := "exporter." + transformerName + ".contracts" - names := viper.GetStringSlice(configKey) - for _, name := range names { - contractNames[name] = true - } - } - return contractNames -} - -func getDeploymentBlock(contractName string) uint64 { - configKey := "contract." + contractName + ".deployed" - value := viper.GetInt64(configKey) - if value < 0 { - log.Infof("No deployment block configured for contract \"%v\", defaulting to 0.", contractName) - return 0 - } - return uint64(value) -} diff --git a/libraries/shared/factories/event/README.md b/libraries/shared/factories/event/README.md deleted file mode 100644 index 3905bd99..00000000 --- a/libraries/shared/factories/event/README.md +++ /dev/null @@ -1,388 +0,0 @@ -# Watching Contract Events - -One approach VulcanizeDB takes to caching and indexing smart contracts is to watch contract events emitted in receipt logs. 
- -With a header synced vDB we can watch events by iterating over headers retrieved from the synced `headers` table and using these headers to -fetch and verify relevant event logs from a full Ethereum node, keeping track of which headers we have checked for which events -with our `checked_headers` table. - -## Assumptions - -This approach assumes you are running a vDB header sync which is run against a light Ethereum node; -this approach also assumes there is a full node available. - -Looking forward, we will be building fetchers that enable sourcing data from IPFS instead of an ETH node. - -## Shared Code - -VulcanizeDB has shared code built out for building and plugging in event transformers - -### [Event Watcher (header sync)](../staging/libraries/shared/watcher/event_watcher.go) - -The event watcher is responsible for continuously fetching and delegating chunks of logs and their associated header to the appropriate transformers. - -Using the `compose` or `composeAndExecute` command, event watchers can be loaded with plugin event transformers and execute over them. - -### [Event Transformer](../staging/libraries/shared/transformer/event_transformer.go) - -The event transformer is responsible for converting event logs into more useful data objects and storing them in Postgres. -The event transformer is composed of converter and repository interfaces and a config struct: -```go -type EventTransformer struct { - Config transformer.EventTransformerConfig - Converter Converter - Repository Repository -} -``` - -The event transformer executes over provided event logs at a given header. - -In this process, the converter unpacks these logs into entities and then converts these entities -to their final db models. These models are then written to the Postgres db by the repository. 
- -```go -func (transformer Transformer) Execute(logs []types.Log, header core.Header, recheckHeaders constants.TransformerExecution) error { - transformerName := transformer.Config.TransformerName - config := transformer.Config - - if len(logs) < 1 { - err := transformer.Repository.MarkHeaderChecked(header.Id) - if err != nil { - log.Printf("Error marking header as checked in %v: %v", transformerName, err) - return err - } - return nil - } - - entities, err := transformer.Converter.ToEntities(config.ContractAbi, logs) - if err != nil { - log.Printf("Error converting logs to entities in %v: %v", transformerName, err) - return err - } - - models, err := transformer.Converter.ToModels(entities) - if err != nil { - log.Printf("Error converting entities to models in %v: %v", transformerName, err) - return err - } - - err = transformer.Repository.Create(header.Id, models) - if err != nil { - log.Printf("Error persisting %v record: %v", transformerName, err) - return err - } - - return nil -} -``` - - -## Custom Code - -In order to watch events at a smart contract, for those events the developer must create: - -1. Config - struct to hold configuration information (contract address, starting block, event name and signature). -1. Entity - struct to unpack the event log into. -1. Model - struct representing the final data model we want to write to Postgres. -1. Converter - an interface which can unpack event logs into our entities and convert those entities to our models. -1. Repository - an interface to write our models to Postgres. -1. EventTransformerInitializer - a public variable which exports our configured transformer to be loaded as part of a plugin. -1. DB migrations - migrations to generate the Postgres schema, tables, views, function, etc that are needed to store and interface with the transformed data models. 
- -The example event we will use looks like: -``` -event ExampleEvent(bytes32 indexed arg1, address indexed arg2, bytes32 arg3, uint256 arg4, uint256 arg5); -``` - -### Config - -The config holds configuration variables for the event transformer, including a name for the transformer, the contract address -it is working at, the contract's ABI, the topic (e.g. event signature; topic0) that it is filtering for, and starting -and ending block numbers. - -```go -type EventTransformerConfig struct { - TransformerName string - ContractAddresses []string - ContractAbi string - Topic string - StartingBlockNumber int64 - EndingBlockNumber int64 // Set -1 for indefinite transformer -} -``` - -### Entity - -Entity field names for event arguments need to be exported and match the argument's name and type. LogIndex, -TransactionIndex, and the Raw log are retained in order to link the data to it's source for downstream validation. - -```go -type ExampleEntity struct { - Arg1 common.Hash - Arg2 common.Address - Arg3 common.Hash - Arg4 *big.Int - Arg5 *big.Int - LogIndex uint - TransactionIndex uint - Raw types.Log -} -``` - -### Model - -Model fields are not constrained by the event log structure. -This allows us to rename our fields, decode or convert our log values into more useful types, and perform operations -with or on the values before persisting the data to Postgres. - -```go -type ExampleModel struct { - EventHash string - UserAddress string - FractionSkimmed string - Surplus string - Deficit string - FinalPosition string - LogIndex uint - TransactionIndex uint - Raw types.Log -} -``` - -### Converter - -The converter needs to satisfy the interface. One for unpacking logs into the custom defined entities, and -another for converting those entities to their final db models. 
- -```go -type Converter interface { - ToEntities(contractAbi string, ethLog []types.Log) ([]interface{}, error) - ToModels([]interface{}) ([]interface{}, error) -} -``` - -For the example event, this might look like: -```go -type ExampleConverter struct{} - -func (ExampleConverter) ToEntities(contractAbi string, ethLogs []types.Log) ([]interface{}, error) { - var entities []interface{} - for _, ethLog := range ethLogs { - entity := &ExampleEntity{} - address := ethLog.Address - abi, err := geth.ParseAbi(contractAbi) - if err != nil { - return nil, err - } - - contract := bind.NewBoundContract(address, abi, nil, nil, nil) - - err = contract.UnpackLog(entity, "ExampleEvent", ethLog) - if err != nil { - return nil, err - } - - entity.Raw = ethLog - entity.LogIndex = ethLog.Index - entity.TransactionIndex = ethLog.TxIndex - - entities = append(entities, *entity) - } - - return entities, nil -} - -func (converter ExampleConverter) ToModels(entities []interface{}) ([]interface{}, error) { - var models []interface{} - for _, entity := range entities { - entity, ok := entity.(ExampleModel) - if !ok { - return nil, fmt.Errorf("entity of type %T, not %T", entity, ExampleModel{}) - } - - fractionSkimmed, err := hexutil.DecodeBig(entity.Arg3.Hex()) - if err != nil { - reuturn nil, err - } - position := new(big.Int) - position.Sub(entity.Arg4, entity.Arg5) - finalPosition := new(big.Int) - if preTaxPosition.Sign() < 0 { - finalPosition = position - } else { - skim := new(big.Int) - skim.Div(position, fractionSkimmed) - finalPosition = position.Sub(position, skim) - } - - rawLog, err := json.Marshal(entity.Raw) - if err != nil { - return nil, err - } - - model := ExampleModel{ - EventHash: entity.Arg1.Hex(), - UserAddress: entity.Arg2.Hex(), - FractionSkimmed: fractionSkimmed.String(), - Surplus: entity.Arg4.String(), - Deficit: entity.Arg5.String(), - FinalPosition: finalPosition, - LogIndex: entity.LogIndex, - TransactionIndex: entity.TransactionIndex, - Raw: rawLog, - } - 
models = append(models, model) - } - return models, nil -} -``` -Notice that in this example we have a bytes32 argument in the event that needs to be decoded to an integer before it can be worked with -to produce our hypothetical `FinalPosition` field. This is to highlight the fact that contracts can and sometimes do encode the -data types we want to work with into raw bytes. Writing custom transformers with these converters allows us to account for this. - -### Repository - -The repository needs to satisfy the interface and use the `Create` method to write the model to Postgres. -```go -type Repository interface { - Create(headerID int64, models []interface{}) error - MarkHeaderChecked(headerID int64) error - MissingHeaders(startingBlockNumber, endingBlockNumber int64) ([]core.Header, error) - RecheckHeaders(startingBlockNumber, endingBlockNUmber int64) ([]core.Header, error) - SetDB(db *postgres.DB) -} -``` - -For the example event, this might look like: -```go -type ExampleRepository struct { - db *postgres.DB -} - -func (repository *ExampleRepository) SetDB(db *postgres.DB) { - repository.db = db -} - -func (repository ExampleRepository) Create(headerID int64, models []interface{}) error { - tx, dBaseErr := repository.db.Beginx() - if dBaseErr != nil { - return dBaseErr - } - for _, model := range models { - model, ok := model.(ExampleModel) - if !ok { - rollbackErr := tx.Rollback() - if rollbackErr != nil { - log.Error("failed to rollback ", rollbackErr) - } - return fmt.Errorf("model of type %T, not %T", model, ExampleModel{}) - } - - _, execErr := tx.Exec( - `INSERT into example_schema.example_event (header_id, event_hash, user_address, fraction_skimmed, surplus, deficit, final_position, log_idx, tx_idx, raw_log) - VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) - ON CONFLICT (header_id, tx_idx, log_idx) DO UPDATE SET event_hash = $2, user_address = $3, fraction_skimmed = $4, surplus = $5, deficit = $6, final_position = $7, raw_log = $10;`, - headerID, 
model.EventHash, model.UserAddress, model.FractonSkimmed, model.Surplus, model.Deficit, model.FinalPosition, model.LogIndex, model.TransactionIndex, model.Raw, - ) - if execErr != nil { - rollbackErr := tx.Rollback() - if rollbackErr != nil { - log.Error("failed to rollback ", rollbackErr) - } - return execErr - } - } - - checkHeaderErr := repo.MarkHeaderCheckedInTransaction(headerID, tx, "example_event_checked") - if checkHeaderErr != nil { - rollbackErr := tx.Rollback() - if rollbackErr != nil { - log.Error("failed to rollback ", rollbackErr) - } - return checkHeaderErr - } - - return tx.Commit() -} - -func (repository ExampleRepository) MarkHeaderChecked(headerID int64) error { - return repo.MarkHeaderChecked(headerID, repository.db, "example_event_checked") -} - -func (repository ExampleRepository) MissingHeaders(startingBlockNumber int64, endingBlockNumber int64) ([]core.Header, error) { - return repo.MissingHeaders(startingBlockNumber, endingBlockNumber, repository.db,"example_event_checked") -} - -func (repository ExampleRepository) RecheckHeaders(startingBlockNumber int64, endingBlockNumber int64) ([]core.Header, error) { - return repo.RecheckHeaders(startingBlockNumber, endingBlockNumber, repository.db, "example_event_checked") -} -``` - -### EventTransformerInitializer - -A transformer initializer variable needs to be exported from somewhere within the transformer repository so that the transformer can be -loaded as part of a plugin in the `compose` or `composeAndExecute` commands. It is important that this variable is named `EventTransformerInitializer` and -it must be of `type EventTransformerInitializer func(db *postgres.DB) EventTransformer`. - -```go -var EventTransformerInitializer transformer.EventTransformerInitializer = factories.Transformer{ - Config: exampleEventConfig, - Converter: ExampleConverter{}, - Repository: &ExampleRepository{}, -}.NewTransformer -``` - -### DB migrations - -We use `goose` as our migration management tool. 
Any Go data model that needs to be written to Postgres by the -repository needs a db migration for the corresponding Postgres data model. - -Each contract or set of transformers being watched should define its own namespace with a db schema: -```postgresql --- +goose Up -CREATE SCHEMA example_schema; - - --- +goose Down -DROP SCHEMA example_schema; - -``` - -For the example event and its resulting model, the table we write to would look like: -```postgresql --- +goose Up -CREATE TABLE example_schema.example_event ( - id SERIAL PRIMARY KEY, - header_id INTEGER NOT NULL REFERENCES headers (id) ON DELETE CASCADE, - event_hash CHARACTER VARYING(66) NOT NULL, - user_address CHARACTER VARYING(66) NOT NULL, - fraction_skimmed NUMERIC NOT NULL, - surplus NUMERIC NOT NULL, - deficit NUMERIC NOT NULL, - final_position NUMERIC NOT NULL, - tx_idx INTEGER NOT NUll, - log_idx INTEGER NOT NUll, - raw_log JSONB, - UNIQUE (header_id, tx_idx, log_idx) -); - -ALTER TABLE public.checked_headers - ADD COLUMN example_event_checked INTEGER NOT NULL DEFAULT 0; - - --- +goose Down -DROP TABLE example_schema.example_event; - -ALTER TABLE public.checked_headers - DROP COLUMN example_event_checked; -``` - -Notice that we have also added a column to the `checked_headers` table for this event so that we can keep track -of which headers we have already filtered through for this event. - -## Summary - -To create a transformer for a contract event we need to create entities for unpacking the raw log, models to represent -the final data structure, a converter to mediate this unpacking and conversion between entities to models, a repository to write -these models to Postgres, db migrations to accommodate these models in Postgres, and a EventTransformerInitializer to export the -configured transformer and load it as a plugin to the `compose` or `composeAndExecute` commands as described in the main readme. 
diff --git a/libraries/shared/factories/event/converter.go b/libraries/shared/factories/event/converter.go deleted file mode 100644 index beed46a4..00000000 --- a/libraries/shared/factories/event/converter.go +++ /dev/null @@ -1,28 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package event - -import ( - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -// Converter transforms log data into general InsertionModels the Repository can persist__ -type Converter interface { - ToModels(contractAbi string, ethLog []core.HeaderSyncLog) ([]InsertionModel, error) - SetDB(db *postgres.DB) -} diff --git a/libraries/shared/factories/event/factories_suite_test.go b/libraries/shared/factories/event/factories_suite_test.go deleted file mode 100644 index dcb38ed6..00000000 --- a/libraries/shared/factories/event/factories_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package event_test - -import ( - "io/ioutil" - "testing" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - log "github.com/sirupsen/logrus" -) - -func TestFactories(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Factories Suite") -} - -var _ = BeforeSuite(func() { - log.SetOutput(ioutil.Discard) -}) diff --git a/libraries/shared/factories/event/repository.go b/libraries/shared/factories/event/repository.go deleted file mode 100644 index 934f9231..00000000 --- a/libraries/shared/factories/event/repository.go +++ /dev/null @@ -1,179 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package event - -import ( - "database/sql/driver" - "fmt" - "strings" - - "github.com/vulcanize/vulcanizedb/utils" - - "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -const SetLogTransformedQuery = `UPDATE public.header_sync_logs SET transformed = true WHERE id = $1` - -// Repository persists transformed values to the DB -type Repository interface { - Create(models []InsertionModel) error - SetDB(db *postgres.DB) -} - -// LogFK is the name of log foreign key columns -const LogFK ColumnName = "log_id" - -// AddressFK is the name of address foreign key columns -const AddressFK ColumnName = "address_id" - -// HeaderFK is the name of header foreign key columns -const HeaderFK ColumnName = "header_id" - -// SchemaName is the schema to work with -type SchemaName string - -// TableName identifies the table for inserting the data -type TableName string - -// ColumnName identifies columns on the given table -type ColumnName string - -// ColumnValues maps a column to the value for insertion. This is restricted to []byte, bool, float64, int64, string, time.Time -type ColumnValues map[ColumnName]interface{} - -// ErrUnsupportedValue is thrown when a model supplies a type of value the postgres driver cannot handle. -var ErrUnsupportedValue = func(value interface{}) error { - return fmt.Errorf("unsupported type of value supplied in model: %v (%T)", value, value) -} - -// InsertionModel is the generalised data structure a converter returns, and contains everything the repository needs to -// persist the converted data. 
-type InsertionModel struct { - SchemaName SchemaName - TableName TableName - OrderedColumns []ColumnName // Defines the fields to insert, and in which order the table expects them - ColumnValues ColumnValues // Associated values for columns, restricted to []byte, bool, float64, int64, string, time.Time -} - -// ModelToQuery stores memoised insertion queries to minimise computation -var ModelToQuery = map[string]string{} - -// GetMemoizedQuery gets/creates a DB insertion query for the model -func GetMemoizedQuery(model InsertionModel) string { - // The schema and table name uniquely determines the insertion query, use that for memoization - queryKey := string(model.SchemaName) + string(model.TableName) - query, queryMemoized := ModelToQuery[queryKey] - if !queryMemoized { - query = GenerateInsertionQuery(model) - ModelToQuery[queryKey] = query - } - return query -} - -// GenerateInsertionQuery creates an SQL insertion query from an insertion model. -// Should be called through GetMemoizedQuery, so the query is not generated on each call to Create. -func GenerateInsertionQuery(model InsertionModel) string { - var valuePlaceholders []string - var updateOnConflict []string - for i := 0; i < len(model.OrderedColumns); i++ { - valuePlaceholder := fmt.Sprintf("$%d", 1+i) - valuePlaceholders = append(valuePlaceholders, valuePlaceholder) - updateOnConflict = append(updateOnConflict, - fmt.Sprintf("%s = %s", model.OrderedColumns[i], valuePlaceholder)) - } - - baseQuery := `INSERT INTO %v.%v (%v) VALUES(%v) - ON CONFLICT (header_id, log_id) DO UPDATE SET %v;` - - return fmt.Sprintf(baseQuery, - model.SchemaName, - model.TableName, - joinOrderedColumns(model.OrderedColumns), - strings.Join(valuePlaceholders, ", "), - strings.Join(updateOnConflict, ", ")) -} - -/* -Create generates an insertion query and persists to the DB, given a slice of InsertionModels. -ColumnValues are restricted to []byte, bool, float64, int64, string, time.Time. 
- -testModel = shared.InsertionModel{ - SchemaName: "public" - TableName: "testEvent", - OrderedColumns: []string{"header_id", "log_id", "variable1"}, - ColumnValues: ColumnValues{ - "header_id": 303 - "log_id": "808", - "variable1": "value1", - }, -} -*/ -func Create(models []InsertionModel, db *postgres.DB) error { - if len(models) == 0 { - return fmt.Errorf("repository got empty model slice") - } - - tx, dbErr := db.Beginx() - if dbErr != nil { - return dbErr - } - - for _, model := range models { - // Maps can't be iterated over in a reliable manner, so we rely on OrderedColumns to define the order to insert - // tx.Exec is variadically typed in the args, so if we wrap in []interface{} we can apply them all automatically - var args []interface{} - for _, col := range model.OrderedColumns { - value := model.ColumnValues[col] - // Check whether or not PG can accept the type of value in the model - okPgValue := driver.IsValue(value) - if !okPgValue { - logrus.WithField("model", model).Errorf("PG cannot handle value of this type: %T", value) - return ErrUnsupportedValue(value) - } - args = append(args, value) - } - - insertionQuery := GetMemoizedQuery(model) - _, execErr := tx.Exec(insertionQuery, args...) 
// couldn't pass varying types in bulk with args :: []string - - if execErr != nil { - rollbackErr := tx.Rollback() - if rollbackErr != nil { - logrus.Error("failed to rollback ", rollbackErr) - } - return execErr - } - - _, logErr := tx.Exec(SetLogTransformedQuery, model.ColumnValues[LogFK]) - - if logErr != nil { - utils.RollbackAndLogFailure(tx, logErr, "header_sync_logs.transformed") - return logErr - } - } - - return tx.Commit() -} - -func joinOrderedColumns(columns []ColumnName) string { - var stringColumns []string - for _, columnName := range columns { - stringColumns = append(stringColumns, string(columnName)) - } - return strings.Join(stringColumns, ", ") -} diff --git a/libraries/shared/factories/event/repository_test.go b/libraries/shared/factories/event/repository_test.go deleted file mode 100644 index 88c978b6..00000000 --- a/libraries/shared/factories/event/repository_test.go +++ /dev/null @@ -1,205 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package event_test - -import ( - "fmt" - "math/big" - - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/factories/event" - "github.com/vulcanize/vulcanizedb/libraries/shared/test_data" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/test_config" -) - -var _ = Describe("Repository", func() { - var db *postgres.DB - - BeforeEach(func() { - db = test_config.NewTestDB(test_config.NewTestNode()) - test_config.CleanTestDB(db) - }) - - Describe("Create", func() { - const createTestEventTableQuery = `CREATE TABLE public.testEvent( - id SERIAL PRIMARY KEY, - header_id INTEGER NOT NULL REFERENCES headers (id) ON DELETE CASCADE, - log_id BIGINT NOT NULL REFERENCES header_sync_logs (id) ON DELETE CASCADE, - variable1 TEXT, - UNIQUE (header_id, log_id) - );` - - var ( - headerID, logID int64 - headerRepository repositories.HeaderRepository - testModel event.InsertionModel - ) - - BeforeEach(func() { - _, tableErr := db.Exec(createTestEventTableQuery) - Expect(tableErr).NotTo(HaveOccurred()) - headerRepository = repositories.NewHeaderRepository(db) - var insertHeaderErr error - headerID, insertHeaderErr = headerRepository.CreateOrUpdateHeader(fakes.FakeHeader) - Expect(insertHeaderErr).NotTo(HaveOccurred()) - headerSyncLog := test_data.CreateTestLog(headerID, db) - logID = headerSyncLog.ID - - testModel = event.InsertionModel{ - SchemaName: "public", - TableName: "testEvent", - OrderedColumns: []event.ColumnName{ - event.HeaderFK, event.LogFK, "variable1", - }, - ColumnValues: event.ColumnValues{ - event.HeaderFK: headerID, - event.LogFK: logID, - "variable1": "value1", - }, - } - }) - - AfterEach(func() { - db.MustExec(`DROP TABLE public.testEvent;`) - }) - - // Needs to run before the other tests, since those insert keys in map - It("memoizes queries", func() { - Expect(len(event.ModelToQuery)).To(Equal(0)) - 
event.GetMemoizedQuery(testModel) - Expect(len(event.ModelToQuery)).To(Equal(1)) - event.GetMemoizedQuery(testModel) - Expect(len(event.ModelToQuery)).To(Equal(1)) - }) - - It("persists a model to postgres", func() { - createErr := event.Create([]event.InsertionModel{testModel}, db) - Expect(createErr).NotTo(HaveOccurred()) - - var res TestEvent - dbErr := db.Get(&res, `SELECT log_id, variable1 FROM public.testEvent;`) - Expect(dbErr).NotTo(HaveOccurred()) - - Expect(res.LogID).To(Equal(fmt.Sprint(testModel.ColumnValues[event.LogFK]))) - Expect(res.Variable1).To(Equal(testModel.ColumnValues["variable1"])) - }) - - Describe("returns errors", func() { - It("for empty model slice", func() { - err := event.Create([]event.InsertionModel{}, db) - Expect(err).To(MatchError("repository got empty model slice")) - }) - - It("for failed SQL inserts", func() { - header := fakes.GetFakeHeader(1) - headerID, headerErr := headerRepository.CreateOrUpdateHeader(header) - Expect(headerErr).NotTo(HaveOccurred()) - - brokenModel := event.InsertionModel{ - SchemaName: "public", - TableName: "testEvent", - // Wrong name of last column compared to DB, will generate incorrect query - OrderedColumns: []event.ColumnName{ - event.HeaderFK, event.LogFK, "variable2", - }, - ColumnValues: event.ColumnValues{ - event.HeaderFK: headerID, - event.LogFK: logID, - "variable1": "value1", - }, - } - - // Remove cached queries, or we won't generate a new (incorrect) one - delete(event.ModelToQuery, "publictestEvent") - - createErr := event.Create([]event.InsertionModel{brokenModel}, db) - // Remove incorrect query, so other tests won't get it - delete(event.ModelToQuery, "publictestEvent") - - Expect(createErr).To(HaveOccurred()) - }) - - It("for unsupported types in ColumnValue", func() { - unsupportedValue := big.NewInt(5) - testModel = event.InsertionModel{ - SchemaName: "public", - TableName: "testEvent", - OrderedColumns: []event.ColumnName{ - event.HeaderFK, event.LogFK, "variable1", - }, - 
ColumnValues: event.ColumnValues{ - event.HeaderFK: headerID, - event.LogFK: logID, - "variable1": unsupportedValue, - }, - } - - createErr := event.Create([]event.InsertionModel{testModel}, db) - Expect(createErr).To(MatchError(event.ErrUnsupportedValue(unsupportedValue))) - }) - }) - - It("upserts queries with conflicting source", func() { - conflictingModel := event.InsertionModel{ - SchemaName: "public", - TableName: "testEvent", - OrderedColumns: []event.ColumnName{ - event.HeaderFK, event.LogFK, "variable1", - }, - ColumnValues: event.ColumnValues{ - event.HeaderFK: headerID, - event.LogFK: logID, - "variable1": "conflictingValue", - }, - } - - createErr := event.Create([]event.InsertionModel{testModel, conflictingModel}, db) - Expect(createErr).NotTo(HaveOccurred()) - - var res TestEvent - dbErr := db.Get(&res, `SELECT log_id, variable1 FROM public.testEvent;`) - Expect(dbErr).NotTo(HaveOccurred()) - Expect(res.Variable1).To(Equal(conflictingModel.ColumnValues["variable1"])) - }) - - It("generates correct queries", func() { - actualQuery := event.GenerateInsertionQuery(testModel) - expectedQuery := `INSERT INTO public.testEvent (header_id, log_id, variable1) VALUES($1, $2, $3) - ON CONFLICT (header_id, log_id) DO UPDATE SET header_id = $1, log_id = $2, variable1 = $3;` - Expect(actualQuery).To(Equal(expectedQuery)) - }) - - It("marks log transformed", func() { - createErr := event.Create([]event.InsertionModel{testModel}, db) - Expect(createErr).NotTo(HaveOccurred()) - - var logTransformed bool - getErr := db.Get(&logTransformed, `SELECT transformed FROM public.header_sync_logs WHERE id = $1`, logID) - Expect(getErr).NotTo(HaveOccurred()) - Expect(logTransformed).To(BeTrue()) - }) - }) -}) - -type TestEvent struct { - LogID string `db:"log_id"` - Variable1 string -} diff --git a/libraries/shared/factories/event/transformer.go b/libraries/shared/factories/event/transformer.go deleted file mode 100644 index 6aad61e0..00000000 --- 
a/libraries/shared/factories/event/transformer.go +++ /dev/null @@ -1,68 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package event - -import ( - "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type Transformer struct { - Config transformer.EventTransformerConfig - Converter Converter - Repository Repository -} - -func (transformer Transformer) NewTransformer(db *postgres.DB) transformer.EventTransformer { - transformer.Converter.SetDB(db) - transformer.Repository.SetDB(db) - return transformer -} - -func (transformer Transformer) Execute(logs []core.HeaderSyncLog) error { - transformerName := transformer.Config.TransformerName - config := transformer.Config - - if len(logs) < 1 { - return nil - } - - models, err := transformer.Converter.ToModels(config.ContractAbi, logs) - if err != nil { - logrus.Errorf("error converting entities to models in %v: %v", transformerName, err) - return err - } - - err = transformer.Repository.Create(models) - if err != nil { - logrus.Errorf("error persisting %v record: %v", transformerName, err) - return err - } - logrus.Debug("Persisted log for " + transformerName) - - return nil -} - -func (transformer Transformer) GetName() string { - 
return transformer.Config.TransformerName -} - -func (transformer Transformer) GetConfig() transformer.EventTransformerConfig { - return transformer.Config -} diff --git a/libraries/shared/factories/event/transformer_test.go b/libraries/shared/factories/event/transformer_test.go deleted file mode 100644 index 1f799106..00000000 --- a/libraries/shared/factories/event/transformer_test.go +++ /dev/null @@ -1,107 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package event_test - -import ( - "math/rand" - - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/factories/event" - "github.com/vulcanize/vulcanizedb/libraries/shared/mocks" - "github.com/vulcanize/vulcanizedb/libraries/shared/test_data" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" -) - -var _ = Describe("Transformer", func() { - var ( - repository mocks.MockEventRepository - converter mocks.MockConverter - t transformer.EventTransformer - headerOne core.Header - config = test_data.GenericTestConfig - logs []core.HeaderSyncLog - ) - - BeforeEach(func() { - repository = mocks.MockEventRepository{} - converter = mocks.MockConverter{} - - t = event.Transformer{ - Repository: &repository, - Converter: &converter, - Config: config, - }.NewTransformer(nil) - - headerOne = core.Header{ID: rand.Int63(), BlockNumber: rand.Int63()} - - logs = []core.HeaderSyncLog{{ - ID: 0, - HeaderID: headerOne.ID, - Log: test_data.GenericTestLog(), - Transformed: false, - }} - }) - - It("sets the db", func() { - Expect(repository.SetDbCalled).To(BeTrue()) - }) - - It("doesn't attempt to convert or persist an empty collection when there are no logs", func() { - err := t.Execute([]core.HeaderSyncLog{}) - - Expect(err).NotTo(HaveOccurred()) - Expect(converter.ToModelsCalledCounter).To(Equal(0)) - Expect(repository.CreateCalledCounter).To(Equal(0)) - }) - - It("converts an eth log to a model", func() { - err := t.Execute(logs) - - Expect(err).NotTo(HaveOccurred()) - Expect(converter.ContractAbi).To(Equal(config.ContractAbi)) - Expect(converter.LogsToConvert).To(Equal(logs)) - }) - - It("returns an error if converting to models fails", func() { - converter.ToModelsError = fakes.FakeError - - err := t.Execute(logs) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - - It("persists the record", func() { - converter.ModelsToReturn = 
[]event.InsertionModel{test_data.GenericModel} - - err := t.Execute(logs) - - Expect(err).NotTo(HaveOccurred()) - Expect(repository.PassedModels[0]).To(Equal(test_data.GenericModel)) - }) - - It("returns error if persisting the record fails", func() { - repository.SetCreateError(fakes.FakeError) - err := t.Execute(logs) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) -}) diff --git a/libraries/shared/factories/storage/EXAMPLE.md b/libraries/shared/factories/storage/EXAMPLE.md deleted file mode 100644 index 116edbfb..00000000 --- a/libraries/shared/factories/storage/EXAMPLE.md +++ /dev/null @@ -1,167 +0,0 @@ -# Storage Transformer Example - -In the Storage Transformer README, we went over code that needs to be written to add a new storage transformer to VulcanizeDB. -In this document, we'll go over an example contract and discuss how one would go about watching its storage. - -## Example Contract - -For the purposes of this document, we'll be assuming that we're interested in watching the following contract: - -```solidity -pragma solidity ^0.5.1; - -contract Contract { - uint256 public num_addresses; - mapping(address => uint) public addresses; - - event AddressAdded( - address addr, - uint256 num_addrs - ); - - constructor() public { - addresses[msg.sender] = 1; - num_addresses = 1; - } - - function add_address(address addr) public { - bool exists = addresses[addr] > 0; - addresses[addr]++; - if (!exists) { - emit AddressAdded(addr, ++num_addresses); - } - } -} -``` - -Disclaimer: this contract has not been audited and is not intended to be modeled or used in production. :) - -This contract persists two values in its storage: - -1. `num_addresses`: the total number of unique addresses known to the contract. -2. `addresses`: a mapping that records the number of times an address has been added to the contract. - -It also emits an event each time a new address is added into the contract's storage. 
- -## Custom Code - -In order to monitor the state of this smart contract, we'd need to implement: an event transformer, a mappings namespace, and a repository. -We will go through each of these in turn. - -### Event Transformer - -Given that the contract's storage includes a mapping, `addresses`, we will need to be able to identify the keys to that mapping that exist in the system so that we can recognize contract storage keys that correspond to non-zero values in that mapping. - -The simplest way to be aware of keys used in a contract's mapping is to listen for contract events that emit the keys that are used in its mapping(s). -Since this contract includes an event, `AddressAdded`, that is emitted each time a new address is added to the `addresses` mapping, we will want to listen for those events and cache the adddresses that map to non-zero values. - -Please see the event transformer README for detailed instructions about developing this code. -In short, it should be feasible to recognize `AddressAdded` events on the blockchain and parse them to keep a record of addresses that have been added to the system. - -### Mappings - -If we point an ethereum node at a blockchain hosting this contract and our node is equipped to write out storage changes happening on this contract, we will expect such changes to appear each time `add_address` (which modifies the `addresses` mapping) is called. - -In order for those changes - which include raw hex versions of storage keys and storage values, to be useful for us - we need to know how to recognize and parse them. -Our mappings file should assist us with both of these tasks: the `Lookup` function should recognize raw storage keys and return known metadata about the storage value. - -In order to perform this lookup, the mappings file should maintain its own mapping of known storage keys to the corresponding storage value metadata. 
-This internal mapping should contain the storage key for `num_addresses` as well as a storage key for each `addresses` key known to be associated with a non-zero value. - -#### num_addresses - -`num_addresses` is the first variable declared on the contract, and it is a simple (non-array, non-mapping) type. -Therefore, we know that its storage key is `0000000000000000000000000000000000000000000000000000000000000000`. -The storage key for non-array and non-mapping variables is (usually*) the index of the variable on the contract's storage. -If we see a storage diff being emitted from this contract with this storage key, we know that the `num_addresses` variable has been modified. - -In this case, we would expect that the call `mappings.Lookup("0000000000000000000000000000000000000000000000000000000000000000")` would return metadata corresponding to the `num_addresses` variable. -This metadata would probably look something like: - -```golang -shared.StorageValueMetadata{ - Name: "num_addresses", - Keys: nil, - Type: shared.Uint256, -} -``` - -* Occasionally, multiple variables may be packed into one storage slot, which complicates a direct translation of the index of the variable on the contract to its storage key. - -#### addresses - -`addresses` is the second variable declared on the contract, but it is a mapping. -Since it is a mapping, the storage key is more complex than `0000000000000000000000000000000000000000000000000000000000000001` (which would be the key for the variable if it were not an array or mapping). -Having a single storage slot for an entire mapping would not work, since there can be an arbitrary number of entries in a mapping, and a single storage value slot is constrained to 32 bytes. 
- -The way that smart contract mappings are maintained in storage (in Solidity) is by creating a new storage key/value pair for each entry in the mapping, where the storage key is a hash of the occupied slot's key concatenated with the mapping's index on the contract. -Given an occupied slot's key, `k`, and a mapping's index on the contract, `i`, we can generate the storage key with the following code: - -```golang -func GetMappingStorageKey(k, i string) string { - return common.BytesToHash(crypto.Keccak256(common.FromHex(k + i))).Hex() -} -``` - -If we were to call the contract's `add_address` function with `0xde0B295669a9FD93d5F28D9Ec85E40f4cb697BAe`, we would expect to see an `AddressAdded` event emitted, with `0xde0B295669a9FD93d5F28D9Ec85E40f4cb697BAe` in its payload. -From that event, we would know that there exists in the contract's storage a storage key of: - -```golang -GetMappingStorageKey("0xde0B295669a9FD93d5F28D9Ec85E40f4cb697BAe", "0000000000000000000000000000000000000000000000000000000000000001") -``` - -Executing the above code results in: `0x0f96a1133cfd5b94c329aa0526b5962bd791dbbfc481ca82f7d4a439e1e9bc40`. - -Therefore, the first time `add_address` was called for this address, we would also expect to see a storage diff with a key of `0x0f96a1133cfd5b94c329aa0526b5962bd791dbbfc481ca82f7d4a439e1e9bc40` and a value of `0000000000000000000000000000000000000000000000000000000000000001`. -This would be the indication that in contract storage, the address `0xde0B295669a9FD93d5F28D9Ec85E40f4cb697BAe` maps to the value 1. 
- -Given that we knew this address was a key in the mapping from our event transformer, we would expect a call to `mappings.Lookup("0x0f96a1133cfd5b94c329aa0526b5962bd791dbbfc481ca82f7d4a439e1e9bc40")` to return metadata corresponding to _this slot_ in the addresses mapping: - -```golang -shared.StorageValueMetadata{ - Name: "addresses, - Keys: map[Key]string{Address: "0xde0B295669a9FD93d5F28D9Ec85E40f4cb697BAe"}, - Type: shared.Uint256, -} -``` - -### Repository - -Once we have recognized a storage diff, we can decode the storage value to the data's known type. -Since the metadata tells us that the above values are `uint256`, we can decode a value like `0000000000000000000000000000000000000000000000000000000000000001` to `1`. - -The purpose of the contract-specific repository is to write that value to the database in a way that makes it useful for future queries. -Typically, this involves writing the block hash, block number, decoded value, and any keys in the metadata to a table. - -The current repository interface has a generalized `Create` function that can accept any arbitrary storage row along with its metadata. -This is deliberate, to facilitate shared use of the common storage transformer. -An implication of this decision is that the `Create` function typically includes a `switch` statement that selects which table to write to, as well as what data to include, based on the name of the variable as defined in the metadata. 
- -An example implementation of `Create` for our example contract above might look like: - -```golang -func (repository AddressStorageRepository) Create(blockNumber int, blockHash string, metadata shared.StorageValueMetadata, value interface{}) error { - switch metadata.Name { - case "num_addresses": - _, err := repository.db.Exec(`INSERT INTO storage.num_addresses (block_hash, block_number, n) VALUES ($1, $2, $3)`, - blockHash, blockNumber, value) - return err - case "addresses": - _, err := repository.db.Exec(`INSERT INTO storage.addresses (block_hash, block_number, address, n) VALUES ($1, $2, $3, $4)`, - blockHash, blockNumber, metadata.Keys[Address], value) - return err - default: - panic(fmt.Sprintf("unrecognized contract storage name: %s", metadata.Name)) - } -} -``` - -## Summary - -With our very simple address storing contract, we would be able to read its storage diffs by implementing an event transformer, a mappings, and a repository. - -The mappings would be able to lookup storage keys reflecting `num_addresses` or any slot in `addresses`, using addresses derived from watching the `AddressAdded` event for the latter. - -The repository would be able to persist the value or `num_addresses` or any slot in `addresses`, using metadata returned from the mappings. - -The mappings and repository could be plugged into the common storage transformer, enabling us to know the contract's state as it is changing. diff --git a/libraries/shared/factories/storage/README.md b/libraries/shared/factories/storage/README.md deleted file mode 100644 index f79e8987..00000000 --- a/libraries/shared/factories/storage/README.md +++ /dev/null @@ -1,140 +0,0 @@ -# Watching Contract Storage - -One approach VulcanizeDB takes to caching and indexing smart contracts is to ingest raw contract storage values. -Assuming that you are running an ethereum node that is writing contract storage changes to a CSV file, VulcanizeDB can parse them and persist the results to postgres. 
- -## Assumptions - -The current approach for caching smart contract storage diffs assumes that you are running a node that is writing contract storage diffs to a CSV file. -The CSV file is expected to have 5 columns: contract address, block hash, block number, storage key, storage value. - -We have [a branch on vulcanize/parity-ethereum](https://github.com/vulcanize/parity-ethereum/tree/watch-storage-diffs) that enables running a node that writes storage diffs this way. - -Looking forward, we would like to isolate this assumption as much as possible. -We may end up needing to read CSV data that is formatted differently, or reading data from a non-CSV source, and we do not want resulting changes to cascade throughout the codebase. - -## Shared Code - -VulcanizeDB has shared code for continuously reading from the CSV file written by the ethereum node and writing a parsed version of each row to postgres. - -### Storage Watcher - -The storage watcher is responsible for continuously delegating CSV rows to the appropriate transformer as they are being written by the ethereum node. -It maintains a mapping of contract addresses to transformers, and will ignore storage diff rows for contract addresses that do not have a corresponding transformer. - -Storage watchers can be loaded with plugin storage transformers and executed using the `composeAndExecute` command. - -### Storage Transformer - -The storage transformer is responsible for converting raw contract storage hex values into useful data and writing them to postgres. -The storage transformer depends on contract-specific implementations of code capable of recognizing storage keys and writing the matching (decoded) storage value to disk. 
- -```golang -func (transformer Transformer) Execute(row shared.StorageDiffRow) error { - metadata, lookupErr := transformer.StorageKeysLookup.Lookup(diff.StorageKey) - if lookupErr != nil { - return lookupErr - } - value, decodeErr := utils.Decode(diff, metadata) - if decodeErr != nil { - return decodeErr - } - return transformer.Repository.Create(diff.BlockHeight, diff.BlockHash.Hex(), metadata, value) -} -``` - -## Custom Code - -In order to watch an additional smart contract, a developer must create three things: - -1. StorageKeysLoader - identify keys in the contract's storage trie, providing metadata to describe how associated values should be decoded. -1. Repository - specify how to persist a parsed version of the storage value matching the recognized storage key. -1. Instance - create an instance of the storage transformer that uses your mappings and repository. - -### StorageKeysLoader - -A `StorageKeysLoader` is used by the `StorageKeysLookup` object on a storage transformer. - -```golang -type KeysLoader interface { - LoadMappings() (map[common.Hash]utils.StorageValueMetadata, error) - SetDB(db *postgres.DB) -} -``` - -When a key is not found, the lookup object refreshes its known keys by calling the loader. - -```golang -func (lookup *keysLookup) refreshMappings() error { - var err error - lookup.mappings, err = lookup.loader.LoadMappings() - if err != nil { - return err - } - lookup.mappings = utils.AddHashedKeys(lookup.mappings) - return nil -} -``` - -A contract-specific implementation of the loader enables the storage transformer to fetch metadata associated with a storage key. - -Storage metadata contains: the name of the variable matching the storage key, a raw version of any keys associated with the variable (if the variable is a mapping), and the variable's type. 
- -```golang -type StorageValueMetadata struct { - Name string - Keys map[Key]string - Type ValueType -} -``` - -The `Keys` field on the metadata is only relevant if the variable is a mapping. For example, in the following Solidity code: - -```solidity -pragma solidity ^0.4.0; - -contract Contract { - uint x; - mapping(address => uint) y; -} -``` - -The metadata for variable `x` would not have any associated keys, but the metadata for a storage key associated with `y` would include the address used to specify that key's index in the mapping. - -The `SetDB` function is required for the storage key loader to connect to the database. -A database connection may be desired when keys in a mapping variable need to be read from log events (e.g. to lookup what addresses may exist in `y`, above). - -### Repository - -```golang -type Repository interface { - Create(blockNumber int, blockHash string, metadata shared.StorageValueMetadata, value interface{}) error - SetDB(db *postgres.DB) -} -``` - -A contract-specific implementation of the repository interface enables the transformer to write the decoded storage value to the appropriate table in postgres. - -The `Create` function is expected to recognize and persist a given storage value by the variable's name, as indicated on the row's metadata. -Note: we advise silently discarding duplicates in `Create` - as it's possible that you may read the same diff several times, and an error will trigger the storage watcher to queue that diff for later processing. - -The `SetDB` function is required for the repository to connect to the database. - -### Instance - -```golang -type Transformer struct { - Address common.Address - Mappings storage_diffs.Mappings - Repository storage_diffs.Repository -} -``` - -A new instance of the storage transformer is initialized with the contract-specific mappings and repository, as well as the contract's address. 
-The contract's address is included so that the watcher can query that value from the transformer in order to build up its mapping of addresses to transformers. - -## Summary - -To begin watching an additional smart contract, create a new mappings file for looking up storage keys on that contract, a repository for writing storage values from the contract, and initialize a new storage transformer instance with the mappings, repository, and contract address. - -The new instance, wrapped in an initializer that calls `SetDB` on the mappings and repository, should be passed to the `AddTransformers` function on the storage watcher. \ No newline at end of file diff --git a/libraries/shared/factories/storage/keys_loader.go b/libraries/shared/factories/storage/keys_loader.go deleted file mode 100644 index 012b0361..00000000 --- a/libraries/shared/factories/storage/keys_loader.go +++ /dev/null @@ -1,28 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package storage - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type KeysLoader interface { - LoadMappings() (map[common.Hash]utils.StorageValueMetadata, error) - SetDB(db *postgres.DB) -} diff --git a/libraries/shared/factories/storage/keys_lookup.go b/libraries/shared/factories/storage/keys_lookup.go deleted file mode 100644 index 71277301..00000000 --- a/libraries/shared/factories/storage/keys_lookup.go +++ /dev/null @@ -1,66 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package storage - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type KeysLookup interface { - Lookup(key common.Hash) (utils.StorageValueMetadata, error) - SetDB(db *postgres.DB) -} - -type keysLookup struct { - loader KeysLoader - mappings map[common.Hash]utils.StorageValueMetadata -} - -func NewKeysLookup(loader KeysLoader) KeysLookup { - return &keysLookup{loader: loader, mappings: make(map[common.Hash]utils.StorageValueMetadata)} -} - -func (lookup *keysLookup) Lookup(key common.Hash) (utils.StorageValueMetadata, error) { - metadata, ok := lookup.mappings[key] - if !ok { - refreshErr := lookup.refreshMappings() - if refreshErr != nil { - return metadata, refreshErr - } - metadata, ok = lookup.mappings[key] - if !ok { - return metadata, utils.ErrStorageKeyNotFound{Key: key.Hex()} - } - } - return metadata, nil -} - -func (lookup *keysLookup) refreshMappings() error { - var err error - lookup.mappings, err = lookup.loader.LoadMappings() - if err != nil { - return err - } - lookup.mappings = utils.AddHashedKeys(lookup.mappings) - return nil -} - -func (lookup *keysLookup) SetDB(db *postgres.DB) { - lookup.loader.SetDB(db) -} diff --git a/libraries/shared/factories/storage/keys_lookup_test.go b/libraries/shared/factories/storage/keys_lookup_test.go deleted file mode 100644 index cc9d7cf9..00000000 --- a/libraries/shared/factories/storage/keys_lookup_test.go +++ /dev/null @@ -1,113 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package storage_test - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/crypto" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/factories/storage" - "github.com/vulcanize/vulcanizedb/libraries/shared/mocks" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" - "github.com/vulcanize/vulcanizedb/test_config" -) - -var _ = Describe("Storage keys lookup", func() { - var ( - fakeMetadata = utils.GetStorageValueMetadata("name", map[utils.Key]string{}, utils.Uint256) - lookup storage.KeysLookup - loader *mocks.MockStorageKeysLoader - ) - - BeforeEach(func() { - loader = &mocks.MockStorageKeysLoader{} - lookup = storage.NewKeysLookup(loader) - }) - - Describe("Lookup", func() { - Describe("when key not found", func() { - It("refreshes keys", func() { - loader.StorageKeyMappings = map[common.Hash]utils.StorageValueMetadata{fakes.FakeHash: fakeMetadata} - _, err := lookup.Lookup(fakes.FakeHash) - - Expect(err).NotTo(HaveOccurred()) - Expect(loader.LoadMappingsCallCount).To(Equal(1)) - }) - - It("returns error if refreshing keys fails", func() { - loader.LoadMappingsError = fakes.FakeError - - _, err := lookup.Lookup(fakes.FakeHash) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - }) - - Describe("when key found", func() { - BeforeEach(func() { - loader.StorageKeyMappings = map[common.Hash]utils.StorageValueMetadata{fakes.FakeHash: fakeMetadata} - _, err := lookup.Lookup(fakes.FakeHash) - 
Expect(err).NotTo(HaveOccurred()) - Expect(loader.LoadMappingsCallCount).To(Equal(1)) - }) - - It("does not refresh keys", func() { - _, err := lookup.Lookup(fakes.FakeHash) - - Expect(err).NotTo(HaveOccurred()) - Expect(loader.LoadMappingsCallCount).To(Equal(1)) - }) - }) - - It("returns metadata for loaded static key", func() { - loader.StorageKeyMappings = map[common.Hash]utils.StorageValueMetadata{fakes.FakeHash: fakeMetadata} - - metadata, err := lookup.Lookup(fakes.FakeHash) - - Expect(err).NotTo(HaveOccurred()) - Expect(metadata).To(Equal(fakeMetadata)) - }) - - It("returns metadata for hashed version of key (accommodates keys emitted from Geth)", func() { - loader.StorageKeyMappings = map[common.Hash]utils.StorageValueMetadata{fakes.FakeHash: fakeMetadata} - - hashedKey := common.BytesToHash(crypto.Keccak256(fakes.FakeHash.Bytes())) - metadata, err := lookup.Lookup(hashedKey) - - Expect(err).NotTo(HaveOccurred()) - Expect(metadata).To(Equal(fakeMetadata)) - }) - - It("returns key not found error if key not found", func() { - _, err := lookup.Lookup(fakes.FakeHash) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(utils.ErrStorageKeyNotFound{Key: fakes.FakeHash.Hex()})) - }) - }) - - Describe("SetDB", func() { - It("sets the db on the loader", func() { - lookup.SetDB(test_config.NewTestDB(test_config.NewTestNode())) - - Expect(loader.SetDBCalled).To(BeTrue()) - }) - }) -}) diff --git a/libraries/shared/factories/storage/repository.go b/libraries/shared/factories/storage/repository.go deleted file mode 100644 index 07a43a85..00000000 --- a/libraries/shared/factories/storage/repository.go +++ /dev/null @@ -1,27 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package storage - -import ( - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type Repository interface { - Create(diffID int64, metadata utils.StorageValueMetadata, value interface{}) error - SetDB(db *postgres.DB) -} diff --git a/libraries/shared/factories/storage/storage_suite_test.go b/libraries/shared/factories/storage/storage_suite_test.go deleted file mode 100644 index 14fb90a0..00000000 --- a/libraries/shared/factories/storage/storage_suite_test.go +++ /dev/null @@ -1,29 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package storage_test - -import ( - "testing" - - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" -) - -func TestStorage(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Storage Factories Suite") -} diff --git a/libraries/shared/factories/storage/transformer.go b/libraries/shared/factories/storage/transformer.go deleted file mode 100644 index 6ede4662..00000000 --- a/libraries/shared/factories/storage/transformer.go +++ /dev/null @@ -1,52 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package storage - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type Transformer struct { - HashedAddress common.Hash - StorageKeysLookup KeysLookup - Repository Repository -} - -func (transformer Transformer) NewTransformer(db *postgres.DB) transformer.StorageTransformer { - transformer.StorageKeysLookup.SetDB(db) - transformer.Repository.SetDB(db) - return transformer -} - -func (transformer Transformer) KeccakContractAddress() common.Hash { - return transformer.HashedAddress -} - -func (transformer Transformer) Execute(diff utils.PersistedStorageDiff) error { - metadata, lookupErr := transformer.StorageKeysLookup.Lookup(diff.StorageKey) - if lookupErr != nil { - return lookupErr - } - value, decodeErr := utils.Decode(diff, metadata) - if decodeErr != nil { - return decodeErr - } - return transformer.Repository.Create(diff.ID, metadata, value) -} diff --git a/libraries/shared/factories/storage/transformer_test.go b/libraries/shared/factories/storage/transformer_test.go deleted file mode 100644 index 4c51d2e6..00000000 --- a/libraries/shared/factories/storage/transformer_test.go +++ /dev/null @@ -1,160 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package storage_test - -import ( - "math/rand" - - "github.com/ethereum/go-ethereum/common" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/factories/storage" - "github.com/vulcanize/vulcanizedb/libraries/shared/mocks" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" -) - -var _ = Describe("Storage transformer", func() { - var ( - storageKeysLookup *mocks.MockStorageKeysLookup - repository *mocks.MockStorageRepository - t storage.Transformer - ) - - BeforeEach(func() { - storageKeysLookup = &mocks.MockStorageKeysLookup{} - repository = &mocks.MockStorageRepository{} - t = storage.Transformer{ - HashedAddress: common.Hash{}, - StorageKeysLookup: storageKeysLookup, - Repository: repository, - } - }) - - It("returns the contract address being watched", func() { - fakeAddress := utils.HexToKeccak256Hash("0x12345") - t.HashedAddress = fakeAddress - - Expect(t.KeccakContractAddress()).To(Equal(fakeAddress)) - }) - - It("looks up metadata for storage key", func() { - t.Execute(utils.PersistedStorageDiff{}) - - Expect(storageKeysLookup.LookupCalled).To(BeTrue()) - }) - - It("returns error if lookup fails", func() { - storageKeysLookup.LookupErr = fakes.FakeError - - err := t.Execute(utils.PersistedStorageDiff{}) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - - It("creates storage row with decoded data", func() { - fakeMetadata := utils.StorageValueMetadata{Type: utils.Address} - storageKeysLookup.Metadata = fakeMetadata - rawValue := common.HexToAddress("0x12345") - fakeBlockNumber := 123 - fakeBlockHash := "0x67890" - fakeRow := utils.PersistedStorageDiff{ - ID: rand.Int63(), - StorageDiffInput: utils.StorageDiffInput{ - HashedAddress: common.Hash{}, - BlockHash: 
common.HexToHash(fakeBlockHash), - BlockHeight: fakeBlockNumber, - StorageKey: common.Hash{}, - StorageValue: rawValue.Hash(), - }, - } - - err := t.Execute(fakeRow) - - Expect(err).NotTo(HaveOccurred()) - Expect(repository.PassedDiffID).To(Equal(fakeRow.ID)) - Expect(repository.PassedMetadata).To(Equal(fakeMetadata)) - Expect(repository.PassedValue.(string)).To(Equal(rawValue.Hex())) - }) - - It("returns error if creating row fails", func() { - rawValue := common.HexToAddress("0x12345") - fakeMetadata := utils.StorageValueMetadata{Type: utils.Address} - storageKeysLookup.Metadata = fakeMetadata - repository.CreateErr = fakes.FakeError - diff := utils.PersistedStorageDiff{StorageDiffInput: utils.StorageDiffInput{StorageValue: rawValue.Hash()}} - - err := t.Execute(diff) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - - Describe("when a storage row contains more than one item packed in storage", func() { - var ( - rawValue = common.HexToAddress("000000000000000000000000000000000000000000000002a300000000002a30") - fakeBlockNumber = 123 - fakeBlockHash = "0x67890" - packedTypes = make(map[int]utils.ValueType) - ) - packedTypes[0] = utils.Uint48 - packedTypes[1] = utils.Uint48 - - var fakeMetadata = utils.StorageValueMetadata{ - Name: "", - Keys: nil, - Type: utils.PackedSlot, - PackedTypes: packedTypes, - } - - It("passes the decoded data items to the repository", func() { - storageKeysLookup.Metadata = fakeMetadata - fakeRow := utils.PersistedStorageDiff{ - ID: rand.Int63(), - StorageDiffInput: utils.StorageDiffInput{ - HashedAddress: common.Hash{}, - BlockHash: common.HexToHash(fakeBlockHash), - BlockHeight: fakeBlockNumber, - StorageKey: common.Hash{}, - StorageValue: rawValue.Hash(), - }, - } - - err := t.Execute(fakeRow) - - Expect(err).NotTo(HaveOccurred()) - Expect(repository.PassedDiffID).To(Equal(fakeRow.ID)) - Expect(repository.PassedMetadata).To(Equal(fakeMetadata)) - expectedPassedValue := make(map[int]string) - 
expectedPassedValue[0] = "10800" - expectedPassedValue[1] = "172800" - Expect(repository.PassedValue.(map[int]string)).To(Equal(expectedPassedValue)) - }) - - It("returns error if creating a row fails", func() { - storageKeysLookup.Metadata = fakeMetadata - repository.CreateErr = fakes.FakeError - diff := utils.PersistedStorageDiff{StorageDiffInput: utils.StorageDiffInput{StorageValue: rawValue.Hash()}} - - err := t.Execute(diff) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - }) -}) diff --git a/libraries/shared/fetcher/csv_tail_storage_fetcher.go b/libraries/shared/fetcher/csv_tail_storage_fetcher.go deleted file mode 100644 index 3d855c19..00000000 --- a/libraries/shared/fetcher/csv_tail_storage_fetcher.go +++ /dev/null @@ -1,49 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package fetcher - -import ( - "strings" - - "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/pkg/fs" -) - -type CsvTailStorageFetcher struct { - tailer fs.Tailer -} - -func NewCsvTailStorageFetcher(tailer fs.Tailer) CsvTailStorageFetcher { - return CsvTailStorageFetcher{tailer: tailer} -} - -func (storageFetcher CsvTailStorageFetcher) FetchStorageDiffs(out chan<- utils.StorageDiffInput, errs chan<- error) { - t, tailErr := storageFetcher.tailer.Tail() - if tailErr != nil { - errs <- tailErr - } - logrus.Debug("fetching storage diffs...") - for line := range t.Lines { - diff, parseErr := utils.FromParityCsvRow(strings.Split(line.Text, ",")) - if parseErr != nil { - errs <- parseErr - } else { - out <- diff - } - } -} diff --git a/libraries/shared/fetcher/csv_tail_storage_fetcher_test.go b/libraries/shared/fetcher/csv_tail_storage_fetcher_test.go deleted file mode 100644 index 48337308..00000000 --- a/libraries/shared/fetcher/csv_tail_storage_fetcher_test.go +++ /dev/null @@ -1,98 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fetcher_test - -import ( - "fmt" - "strings" - "time" - - "github.com/ethereum/go-ethereum/common" - "github.com/hpcloud/tail" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/fetcher" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" -) - -var _ = Describe("Csv Tail Storage Fetcher", func() { - var ( - errorsChannel chan error - mockTailer *fakes.MockTailer - diffsChannel chan utils.StorageDiffInput - storageFetcher fetcher.CsvTailStorageFetcher - ) - - BeforeEach(func() { - errorsChannel = make(chan error) - diffsChannel = make(chan utils.StorageDiffInput) - mockTailer = fakes.NewMockTailer() - storageFetcher = fetcher.NewCsvTailStorageFetcher(mockTailer) - }) - - It("adds error to errors channel if tailing file fails", func(done Done) { - mockTailer.TailErr = fakes.FakeError - - go storageFetcher.FetchStorageDiffs(diffsChannel, errorsChannel) - - Expect(<-errorsChannel).To(MatchError(fakes.FakeError)) - close(done) - }) - - It("adds parsed csv row to rows channel for storage diff", func(done Done) { - line := getFakeLine() - - go storageFetcher.FetchStorageDiffs(diffsChannel, errorsChannel) - mockTailer.Lines <- line - - expectedRow, err := utils.FromParityCsvRow(strings.Split(line.Text, ",")) - Expect(err).NotTo(HaveOccurred()) - Expect(<-diffsChannel).To(Equal(expectedRow)) - close(done) - }) - - It("adds error to errors channel if parsing csv fails", func(done Done) { - line := &tail.Line{Text: "invalid"} - - go storageFetcher.FetchStorageDiffs(diffsChannel, errorsChannel) - mockTailer.Lines <- line - - Expect(<-errorsChannel).To(HaveOccurred()) - select { - case <-diffsChannel: - Fail("value passed to rows channel on error") - default: - Succeed() - } - close(done) - }) -}) - -func getFakeLine() *tail.Line { - address := common.HexToAddress("0x1234567890abcdef") - blockHash := []byte{4, 5, 6} - blockHeight := int64(789) - storageKey := []byte{9, 8, 7} - storageValue := []byte{6, 5, 4} - return &tail.Line{ - Text: fmt.Sprintf("%s,%s,%d,%s,%s", common.Bytes2Hex(address.Bytes()), 
common.Bytes2Hex(blockHash), - blockHeight, common.Bytes2Hex(storageKey), common.Bytes2Hex(storageValue)), - Time: time.Time{}, - Err: nil, - } -} diff --git a/libraries/shared/fetcher/fetcher_suite_test.go b/libraries/shared/fetcher/fetcher_suite_test.go deleted file mode 100644 index a2e02411..00000000 --- a/libraries/shared/fetcher/fetcher_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fetcher_test - -import ( - "io/ioutil" - "testing" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - log "github.com/sirupsen/logrus" -) - -func TestFactories(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Shared Fetcher Suite") -} - -var _ = BeforeSuite(func() { - log.SetOutput(ioutil.Discard) -}) diff --git a/libraries/shared/fetcher/geth_rpc_storage_fetcher.go b/libraries/shared/fetcher/geth_rpc_storage_fetcher.go deleted file mode 100644 index ca6f76f6..00000000 --- a/libraries/shared/fetcher/geth_rpc_storage_fetcher.go +++ /dev/null @@ -1,86 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . -package fetcher - -import ( - "fmt" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/rlp" - "github.com/ethereum/go-ethereum/statediff" - "github.com/sirupsen/logrus" - - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/libraries/shared/streamer" -) - -const ( - PayloadChanBufferSize = 20000 // the max eth sub buffer size -) - -type GethRPCStorageFetcher struct { - StatediffPayloadChan chan statediff.Payload - streamer streamer.Streamer -} - -func NewGethRPCStorageFetcher(streamer streamer.Streamer) GethRPCStorageFetcher { - return GethRPCStorageFetcher{ - StatediffPayloadChan: make(chan statediff.Payload, PayloadChanBufferSize), - streamer: streamer, - } -} - -func (fetcher GethRPCStorageFetcher) FetchStorageDiffs(out chan<- utils.StorageDiffInput, errs chan<- error) { - ethStatediffPayloadChan := fetcher.StatediffPayloadChan - clientSubscription, clientSubErr := fetcher.streamer.Stream(ethStatediffPayloadChan, statediff.Params{}) - if clientSubErr != nil { - errs <- clientSubErr - panic(fmt.Sprintf("Error creating a geth client subscription: %v", clientSubErr)) - } - logrus.Info("Successfully created a geth client subscription: ", clientSubscription) - - for { - diff := <-ethStatediffPayloadChan - logrus.Trace("received a statediff") - stateDiff := new(statediff.StateObject) - decodeErr := rlp.DecodeBytes(diff.StateObjectRlp, stateDiff) - if decodeErr != nil { - logrus.Warn("Error decoding state diff into RLP: ", decodeErr) - errs <- decodeErr - } - - accounts := utils.GetAccountsFromDiff(*stateDiff) - 
logrus.Trace(fmt.Sprintf("iterating through %d accounts on stateDiff for block %d", len(accounts), stateDiff.BlockNumber)) - for _, account := range accounts { - logrus.Trace(fmt.Sprintf("iterating through %d Storage values on account with key %s", len(account.StorageNodes), common.BytesToHash(account.LeafKey).Hex())) - for _, storage := range account.StorageNodes { - diff, formatErr := utils.FromGethStateDiff(account, stateDiff, storage) - if formatErr != nil { - logrus.Error("failed to format utils.StorageDiff from storage with key: ", common.BytesToHash(storage.LeafKey), "from account with key: ", common.BytesToHash(account.LeafKey)) - errs <- formatErr - continue - } - logrus.Trace("adding storage diff to out channel", - "keccak of address: ", diff.HashedAddress.Hex(), - "block height: ", diff.BlockHeight, - "storage key: ", diff.StorageKey.Hex(), - "storage value: ", diff.StorageValue.Hex()) - - out <- diff - } - } - } -} diff --git a/libraries/shared/fetcher/geth_rpc_storage_fetcher_test.go b/libraries/shared/fetcher/geth_rpc_storage_fetcher_test.go deleted file mode 100644 index 76b14121..00000000 --- a/libraries/shared/fetcher/geth_rpc_storage_fetcher_test.go +++ /dev/null @@ -1,174 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package fetcher_test - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/rlp" - "github.com/ethereum/go-ethereum/rpc" - "github.com/ethereum/go-ethereum/statediff" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/libraries/shared/fetcher" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/libraries/shared/test_data" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" -) - -type MockStoragediffStreamer struct { - subscribeError error - PassedPayloadChan chan statediff.Payload - PassedParams statediff.Params - streamPayloads []statediff.Payload -} - -func (streamer *MockStoragediffStreamer) Stream(statediffPayloadChan chan statediff.Payload, params statediff.Params) (*rpc.ClientSubscription, error) { - clientSubscription := rpc.ClientSubscription{} - streamer.PassedPayloadChan = statediffPayloadChan - streamer.PassedParams = params - go func() { - for _, payload := range streamer.streamPayloads { - streamer.PassedPayloadChan <- payload - } - }() - - return &clientSubscription, streamer.subscribeError -} - -func (streamer *MockStoragediffStreamer) SetSubscribeError(err error) { - streamer.subscribeError = err -} - -func (streamer *MockStoragediffStreamer) SetPayloads(payloads []statediff.Payload) { - streamer.streamPayloads = payloads -} - -var _ = Describe("Geth RPC Storage Fetcher", func() { - var streamer MockStoragediffStreamer - var statediffFetcher fetcher.GethRPCStorageFetcher - var storagediffChan chan utils.StorageDiffInput - var errorChan chan error - - BeforeEach(func() { - streamer = MockStoragediffStreamer{} - statediffFetcher = fetcher.NewGethRPCStorageFetcher(&streamer) - storagediffChan = make(chan utils.StorageDiffInput) - errorChan = make(chan error) - }) - - It("adds errors to error channel if the RPC subscription fails and panics", func(done Done) { - streamer.SetSubscribeError(fakes.FakeError) - - go func() { 
- failedSub := func() { - statediffFetcher.FetchStorageDiffs(storagediffChan, errorChan) - } - Expect(failedSub).To(Panic()) - }() - - Expect(<-errorChan).To(MatchError(fakes.FakeError)) - close(done) - }) - - It("streams StatediffPayloads from a Geth RPC subscription", func(done Done) { - streamer.SetPayloads([]statediff.Payload{test_data.MockStatediffPayload}) - - go statediffFetcher.FetchStorageDiffs(storagediffChan, errorChan) - - streamedPayload := <-statediffFetcher.StatediffPayloadChan - Expect(streamedPayload).To(Equal(test_data.MockStatediffPayload)) - Expect(streamer.PassedPayloadChan).To(Equal(statediffFetcher.StatediffPayloadChan)) - close(done) - }) - - It("adds errors to error channel if decoding the state diff RLP fails", func(done Done) { - badStatediffPayload := statediff.Payload{} - streamer.SetPayloads([]statediff.Payload{badStatediffPayload}) - - go statediffFetcher.FetchStorageDiffs(storagediffChan, errorChan) - - Expect(<-errorChan).To(MatchError("EOF")) - - close(done) - }) - - It("adds parsed statediff payloads to the rows channel", func(done Done) { - streamer.SetPayloads([]statediff.Payload{test_data.MockStatediffPayload}) - - go statediffFetcher.FetchStorageDiffs(storagediffChan, errorChan) - - height := test_data.BlockNumber - intHeight := int(height.Int64()) - createdExpectedStorageDiff := utils.StorageDiffInput{ - HashedAddress: common.BytesToHash(test_data.ContractLeafKey[:]), - BlockHash: common.HexToHash("0xfa40fbe2d98d98b3363a778d52f2bcd29d6790b9b3f3cab2b167fd12d3550f73"), - BlockHeight: intHeight, - StorageKey: common.BytesToHash(test_data.StorageKey), - StorageValue: common.BytesToHash(test_data.SmallStorageValue), - } - updatedExpectedStorageDiff := utils.StorageDiffInput{ - HashedAddress: common.BytesToHash(test_data.AnotherContractLeafKey[:]), - BlockHash: common.HexToHash("0xfa40fbe2d98d98b3363a778d52f2bcd29d6790b9b3f3cab2b167fd12d3550f73"), - BlockHeight: intHeight, - StorageKey: common.BytesToHash(test_data.StorageKey), - 
StorageValue: common.BytesToHash(test_data.LargeStorageValue), - } - deletedExpectedStorageDiff := utils.StorageDiffInput{ - HashedAddress: common.BytesToHash(test_data.AnotherContractLeafKey[:]), - BlockHash: common.HexToHash("0xfa40fbe2d98d98b3363a778d52f2bcd29d6790b9b3f3cab2b167fd12d3550f73"), - BlockHeight: intHeight, - StorageKey: common.BytesToHash(test_data.StorageKey), - StorageValue: common.BytesToHash(test_data.SmallStorageValue), - } - - createdStateDiff := <-storagediffChan - updatedStateDiff := <-storagediffChan - deletedStateDiff := <-storagediffChan - - Expect(createdStateDiff).To(Equal(createdExpectedStorageDiff)) - Expect(updatedStateDiff).To(Equal(updatedExpectedStorageDiff)) - Expect(deletedStateDiff).To(Equal(deletedExpectedStorageDiff)) - - close(done) - }) - - It("adds errors to error channel if formatting the diff as a StateDiff object fails", func(done Done) { - accountDiffs := test_data.CreatedAccountDiffs - accountDiffs[0].StorageNodes = []statediff.StorageNode{test_data.StorageWithBadValue} - - stateDiff := statediff.StateObject{ - BlockNumber: test_data.BlockNumber, - BlockHash: common.HexToHash(test_data.BlockHash), - Nodes: accountDiffs, - } - - stateDiffRlp, err := rlp.EncodeToBytes(stateDiff) - Expect(err).NotTo(HaveOccurred()) - - badStatediffPayload := statediff.Payload{ - StateObjectRlp: stateDiffRlp, - } - streamer.SetPayloads([]statediff.Payload{badStatediffPayload}) - - go statediffFetcher.FetchStorageDiffs(storagediffChan, errorChan) - - Expect(<-errorChan).To(MatchError("rlp: input contains more than one value")) - - close(done) - }) -}) diff --git a/libraries/shared/fetcher/log_fetcher.go b/libraries/shared/fetcher/log_fetcher.go deleted file mode 100644 index 8c1ad2bb..00000000 --- a/libraries/shared/fetcher/log_fetcher.go +++ /dev/null @@ -1,59 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General 
Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fetcher - -import ( - "github.com/ethereum/go-ethereum" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type ILogFetcher interface { - FetchLogs(contractAddresses []common.Address, topics []common.Hash, missingHeader core.Header) ([]types.Log, error) - // TODO Extend FetchLogs for doing several blocks at a time -} - -type LogFetcher struct { - blockChain core.BlockChain -} - -func NewLogFetcher(blockchain core.BlockChain) *LogFetcher { - return &LogFetcher{ - blockChain: blockchain, - } -} - -// Checks all topic0s, on all addresses, fetching matching logs for the given header -func (logFetcher LogFetcher) FetchLogs(addresses []common.Address, topic0s []common.Hash, header core.Header) ([]types.Log, error) { - blockHash := common.HexToHash(header.Hash) - query := ethereum.FilterQuery{ - BlockHash: &blockHash, - Addresses: addresses, - // Search for _any_ of the topics in topic0 position; see docs on `FilterQuery` - Topics: [][]common.Hash{topic0s}, - } - - logs, err := logFetcher.blockChain.GetEthLogsWithCustomQuery(query) - if err != nil { - // TODO review aggregate fetching error handling - return []types.Log{}, err - } - - return logs, nil -} diff --git a/libraries/shared/fetcher/log_fetcher_test.go b/libraries/shared/fetcher/log_fetcher_test.go deleted file mode 100644 index e305f981..00000000 --- 
a/libraries/shared/fetcher/log_fetcher_test.go +++ /dev/null @@ -1,70 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fetcher_test - -import ( - "github.com/ethereum/go-ethereum" - "github.com/ethereum/go-ethereum/common" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/libraries/shared/fetcher" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" -) - -var _ = Describe("LogFetcher", func() { - Describe("FetchLogs", func() { - It("fetches logs based on the given query", func() { - blockChain := fakes.NewMockBlockChain() - logFetcher := fetcher.NewLogFetcher(blockChain) - header := fakes.FakeHeader - - addresses := []common.Address{ - common.HexToAddress("0xfakeAddress"), - common.HexToAddress("0xanotherFakeAddress"), - } - - topicZeros := []common.Hash{common.BytesToHash([]byte{1, 2, 3, 4, 5})} - - _, err := logFetcher.FetchLogs(addresses, topicZeros, header) - - address1 := common.HexToAddress("0xfakeAddress") - address2 := common.HexToAddress("0xanotherFakeAddress") - Expect(err).NotTo(HaveOccurred()) - - blockHash := common.HexToHash(header.Hash) - expectedQuery := ethereum.FilterQuery{ - BlockHash: &blockHash, - Addresses: []common.Address{address1, address2}, - Topics: [][]common.Hash{topicZeros}, - } - 
blockChain.AssertGetEthLogsWithCustomQueryCalledWith(expectedQuery) - }) - - It("returns an error if fetching the logs fails", func() { - blockChain := fakes.NewMockBlockChain() - blockChain.SetGetEthLogsWithCustomQueryErr(fakes.FakeError) - logFetcher := fetcher.NewLogFetcher(blockChain) - - _, err := logFetcher.FetchLogs([]common.Address{}, []common.Hash{}, core.Header{}) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - }) -}) diff --git a/libraries/shared/fetcher/state_diff_fetcher.go b/libraries/shared/fetcher/state_diff_fetcher.go deleted file mode 100644 index fa5ebdfc..00000000 --- a/libraries/shared/fetcher/state_diff_fetcher.go +++ /dev/null @@ -1,79 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package fetcher - -import ( - "fmt" - - "github.com/ethereum/go-ethereum/statediff" - - "github.com/vulcanize/vulcanizedb/pkg/eth/client" -) - -// StateDiffFetcher is the state diff fetching interface -type StateDiffFetcher interface { - FetchStateDiffsAt(blockHeights []uint64) ([]statediff.Payload, error) -} - -// BatchClient is an interface to a batch-fetching geth rpc client; created to allow mock insertion -type BatchClient interface { - BatchCall(batch []client.BatchElem) error -} - -// stateDiffFetcher is the state diff fetching struct -type stateDiffFetcher struct { - // stateDiffFetcher is thread-safe as long as the underlying client is thread-safe, since it has/modifies no other state - // http.Client is thread-safe - client BatchClient -} - -const method = "statediff_stateDiffAt" - -// NewStateDiffFetcher returns a IStateDiffFetcher -func NewStateDiffFetcher(bc BatchClient) StateDiffFetcher { - return &stateDiffFetcher{ - client: bc, - } -} - -// FetchStateDiffsAt fetches the statediff payloads at the given block heights -// Calls StateDiffAt(ctx context.Context, blockNumber uint64) (*Payload, error) -func (fetcher *stateDiffFetcher) FetchStateDiffsAt(blockHeights []uint64) ([]statediff.Payload, error) { - batch := make([]client.BatchElem, 0) - for _, height := range blockHeights { - batch = append(batch, client.BatchElem{ - Method: method, - Args: []interface{}{height}, - Result: new(statediff.Payload), - }) - } - batchErr := fetcher.client.BatchCall(batch) - if batchErr != nil { - return nil, fmt.Errorf("stateDiffFetcher err: %s", batchErr.Error()) - } - results := make([]statediff.Payload, 0, len(blockHeights)) - for _, batchElem := range batch { - if batchElem.Error != nil { - return nil, fmt.Errorf("stateDiffFetcher err: %s", batchElem.Error.Error()) - } - payload, ok := batchElem.Result.(*statediff.Payload) - if ok { - results = append(results, *payload) - } - } - return results, nil -} diff --git 
a/libraries/shared/fetcher/state_diff_fetcher_test.go b/libraries/shared/fetcher/state_diff_fetcher_test.go deleted file mode 100644 index 96c1dc8d..00000000 --- a/libraries/shared/fetcher/state_diff_fetcher_test.go +++ /dev/null @@ -1,54 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fetcher_test - -import ( - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/libraries/shared/fetcher" - "github.com/vulcanize/vulcanizedb/libraries/shared/mocks" - "github.com/vulcanize/vulcanizedb/libraries/shared/test_data" -) - -var _ = Describe("StateDiffFetcher", func() { - Describe("FetchStateDiffsAt", func() { - var ( - mc *mocks.BackFillerClient - stateDiffFetcher fetcher.StateDiffFetcher - ) - BeforeEach(func() { - mc = new(mocks.BackFillerClient) - setDiffAtErr1 := mc.SetReturnDiffAt(test_data.BlockNumber.Uint64(), test_data.MockStatediffPayload) - Expect(setDiffAtErr1).ToNot(HaveOccurred()) - setDiffAtErr2 := mc.SetReturnDiffAt(test_data.BlockNumber2.Uint64(), test_data.MockStatediffPayload2) - Expect(setDiffAtErr2).ToNot(HaveOccurred()) - stateDiffFetcher = fetcher.NewStateDiffFetcher(mc) - }) - It("Batch calls statediff_stateDiffAt", func() { - blockHeights := []uint64{ - test_data.BlockNumber.Uint64(), - test_data.BlockNumber2.Uint64(), - } - stateDiffPayloads, fetchErr := stateDiffFetcher.FetchStateDiffsAt(blockHeights) - Expect(fetchErr).ToNot(HaveOccurred()) - Expect(len(stateDiffPayloads)).To(Equal(2)) - Expect(stateDiffPayloads[0]).To(Equal(test_data.MockStatediffPayload)) - Expect(stateDiffPayloads[1]).To(Equal(test_data.MockStatediffPayload2)) - }) - }) -}) diff --git a/libraries/shared/fetcher/storage_fetcher_interface.go b/libraries/shared/fetcher/storage_fetcher_interface.go deleted file mode 100644 index 34e0b5ae..00000000 --- a/libraries/shared/fetcher/storage_fetcher_interface.go +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2018 Vulcanize -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package fetcher - -import "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - -type IStorageFetcher interface { - FetchStorageDiffs(out chan<- utils.StorageDiffInput, errs chan<- error) -} diff --git a/libraries/shared/logs/delegator.go b/libraries/shared/logs/delegator.go deleted file mode 100644 index 22b3da75..00000000 --- a/libraries/shared/logs/delegator.go +++ /dev/null @@ -1,86 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package logs - -import ( - "errors" - - "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/libraries/shared/chunker" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore" -) - -var ( - ErrNoLogs = errors.New("no logs available for transforming") - ErrNoTransformers = errors.New("no event transformers configured in the log delegator") -) - -type ILogDelegator interface { - AddTransformer(t transformer.EventTransformer) - DelegateLogs() error -} - -type LogDelegator struct { - Chunker chunker.Chunker - LogRepository datastore.HeaderSyncLogRepository - Transformers []transformer.EventTransformer -} - -func (delegator *LogDelegator) AddTransformer(t transformer.EventTransformer) { - delegator.Transformers = append(delegator.Transformers, t) - delegator.Chunker.AddConfig(t.GetConfig()) -} - -func (delegator *LogDelegator) DelegateLogs() error { - if len(delegator.Transformers) < 1 { - return ErrNoTransformers - } - - persistedLogs, fetchErr := delegator.LogRepository.GetUntransformedHeaderSyncLogs() - if fetchErr != nil { - logrus.Errorf("error loading logs from db: %s", fetchErr.Error()) - return fetchErr - } - - if len(persistedLogs) < 1 { - return ErrNoLogs - } - - transformErr := delegator.delegateLogs(persistedLogs) - if transformErr != nil { - logrus.Errorf("error transforming logs: %s", transformErr) - return transformErr - } - - return nil -} - -func (delegator *LogDelegator) delegateLogs(logs []core.HeaderSyncLog) error { - chunkedLogs := delegator.Chunker.ChunkLogs(logs) - for _, t := range delegator.Transformers { - transformerName := t.GetConfig().TransformerName - logChunk := chunkedLogs[transformerName] - err := t.Execute(logChunk) - if err != nil { - logrus.Errorf("%v transformer failed to execute in watcher: %v", transformerName, err) - return err - } - } - return nil -} diff --git a/libraries/shared/logs/delegator_test.go 
b/libraries/shared/logs/delegator_test.go deleted file mode 100644 index 898c4918..00000000 --- a/libraries/shared/logs/delegator_test.go +++ /dev/null @@ -1,166 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package logs_test - -import ( - "strings" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/chunker" - "github.com/vulcanize/vulcanizedb/libraries/shared/logs" - "github.com/vulcanize/vulcanizedb/libraries/shared/mocks" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" -) - -var _ = Describe("Log delegator", func() { - Describe("AddTransformer", func() { - It("adds transformers to the delegator", func() { - fakeTransformer := &mocks.MockEventTransformer{} - delegator := logs.LogDelegator{Chunker: chunker.NewLogChunker()} - - delegator.AddTransformer(fakeTransformer) - - Expect(delegator.Transformers).To(Equal([]transformer.EventTransformer{fakeTransformer})) - }) - - It("passes transformers' configs to the chunker", func() { - fakeTransformer := &mocks.MockEventTransformer{} - fakeConfig := mocks.FakeTransformerConfig - fakeTransformer.SetTransformerConfig(fakeConfig) - chunker := chunker.NewLogChunker() - delegator := logs.LogDelegator{Chunker: chunker} - - delegator.AddTransformer(fakeTransformer) - - expectedName := fakeConfig.TransformerName - expectedTopic := common.HexToHash(fakeConfig.Topic) - Expect(chunker.NameToTopic0).To(Equal(map[string]common.Hash{expectedName: expectedTopic})) - expectedAddress := strings.ToLower(fakeConfig.ContractAddresses[0]) - Expect(chunker.AddressToNames).To(Equal(map[string][]string{expectedAddress: {expectedName}})) - }) - }) - - Describe("DelegateLogs", func() { - It("returns error if no transformers configured", func() { - delegator := newDelegator(&fakes.MockHeaderSyncLogRepository{}) - - err := delegator.DelegateLogs() - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(logs.ErrNoTransformers)) - }) - - It("gets untransformed logs", func() { - mockLogRepository := &fakes.MockHeaderSyncLogRepository{} - mockLogRepository.ReturnLogs = []core.HeaderSyncLog{{}} - delegator := newDelegator(mockLogRepository) - 
delegator.AddTransformer(&mocks.MockEventTransformer{}) - - err := delegator.DelegateLogs() - - Expect(err).NotTo(HaveOccurred()) - Expect(mockLogRepository.GetCalled).To(BeTrue()) - }) - - It("returns error if getting untransformed logs fails", func() { - mockLogRepository := &fakes.MockHeaderSyncLogRepository{} - mockLogRepository.GetError = fakes.FakeError - delegator := newDelegator(mockLogRepository) - delegator.AddTransformer(&mocks.MockEventTransformer{}) - - err := delegator.DelegateLogs() - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - - It("returns error that no logs were found if no logs returned", func() { - delegator := newDelegator(&fakes.MockHeaderSyncLogRepository{}) - delegator.AddTransformer(&mocks.MockEventTransformer{}) - - err := delegator.DelegateLogs() - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(logs.ErrNoLogs)) - }) - - It("delegates chunked logs to transformers", func() { - fakeTransformer := &mocks.MockEventTransformer{} - config := mocks.FakeTransformerConfig - fakeTransformer.SetTransformerConfig(config) - fakeGethLog := types.Log{ - Address: common.HexToAddress(config.ContractAddresses[0]), - Topics: []common.Hash{common.HexToHash(config.Topic)}, - } - fakeHeaderSyncLogs := []core.HeaderSyncLog{{Log: fakeGethLog}} - mockLogRepository := &fakes.MockHeaderSyncLogRepository{} - mockLogRepository.ReturnLogs = fakeHeaderSyncLogs - delegator := newDelegator(mockLogRepository) - delegator.AddTransformer(fakeTransformer) - - err := delegator.DelegateLogs() - - Expect(err).NotTo(HaveOccurred()) - Expect(fakeTransformer.ExecuteWasCalled).To(BeTrue()) - Expect(fakeTransformer.PassedLogs).To(Equal(fakeHeaderSyncLogs)) - }) - - It("returns error if transformer returns an error", func() { - mockLogRepository := &fakes.MockHeaderSyncLogRepository{} - mockLogRepository.ReturnLogs = []core.HeaderSyncLog{{}} - delegator := newDelegator(mockLogRepository) - fakeTransformer := 
&mocks.MockEventTransformer{ExecuteError: fakes.FakeError} - delegator.AddTransformer(fakeTransformer) - - err := delegator.DelegateLogs() - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - - It("returns nil for error when logs returned and delegated", func() { - fakeTransformer := &mocks.MockEventTransformer{} - config := mocks.FakeTransformerConfig - fakeTransformer.SetTransformerConfig(config) - fakeGethLog := types.Log{ - Address: common.HexToAddress(config.ContractAddresses[0]), - Topics: []common.Hash{common.HexToHash(config.Topic)}, - } - fakeHeaderSyncLogs := []core.HeaderSyncLog{{Log: fakeGethLog}} - mockLogRepository := &fakes.MockHeaderSyncLogRepository{} - mockLogRepository.ReturnLogs = fakeHeaderSyncLogs - delegator := newDelegator(mockLogRepository) - delegator.AddTransformer(fakeTransformer) - - err := delegator.DelegateLogs() - - Expect(err).NotTo(HaveOccurred()) - }) - }) -}) - -func newDelegator(headerSyncLogRepository *fakes.MockHeaderSyncLogRepository) *logs.LogDelegator { - return &logs.LogDelegator{ - Chunker: chunker.NewLogChunker(), - LogRepository: headerSyncLogRepository, - } -} diff --git a/libraries/shared/logs/extractor.go b/libraries/shared/logs/extractor.go deleted file mode 100644 index 2e78c861..00000000 --- a/libraries/shared/logs/extractor.go +++ /dev/null @@ -1,154 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package logs - -import ( - "errors" - - "github.com/ethereum/go-ethereum/common" - "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/libraries/shared/constants" - "github.com/vulcanize/vulcanizedb/libraries/shared/fetcher" - "github.com/vulcanize/vulcanizedb/libraries/shared/transactions" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore" -) - -var ( - ErrNoUncheckedHeaders = errors.New("no unchecked headers available for log fetching") - ErrNoWatchedAddresses = errors.New("no watched addresses configured in the log extractor") -) - -type ILogExtractor interface { - AddTransformerConfig(config transformer.EventTransformerConfig) error - ExtractLogs(recheckHeaders constants.TransformerExecution) error -} - -type LogExtractor struct { - Addresses []common.Address - CheckedHeadersRepository datastore.CheckedHeadersRepository - CheckedLogsRepository datastore.CheckedLogsRepository - Fetcher fetcher.ILogFetcher - LogRepository datastore.HeaderSyncLogRepository - StartingBlock *int64 - Syncer transactions.ITransactionsSyncer - Topics []common.Hash -} - -// Add additional logs to extract -func (extractor *LogExtractor) AddTransformerConfig(config transformer.EventTransformerConfig) error { - checkedHeadersErr := extractor.updateCheckedHeaders(config) - if checkedHeadersErr != nil { - return checkedHeadersErr - } - - if extractor.StartingBlock == nil { - extractor.StartingBlock = &config.StartingBlockNumber - } else if earlierStartingBlockNumber(config.StartingBlockNumber, *extractor.StartingBlock) { - extractor.StartingBlock = &config.StartingBlockNumber - } - - addresses := transformer.HexStringsToAddresses(config.ContractAddresses) - extractor.Addresses = append(extractor.Addresses, addresses...) 
- extractor.Topics = append(extractor.Topics, common.HexToHash(config.Topic)) - return nil -} - -// Fetch and persist watched logs -func (extractor LogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution) error { - if len(extractor.Addresses) < 1 { - logrus.Errorf("error extracting logs: %s", ErrNoWatchedAddresses.Error()) - return ErrNoWatchedAddresses - } - - uncheckedHeaders, uncheckedHeadersErr := extractor.CheckedHeadersRepository.UncheckedHeaders(*extractor.StartingBlock, -1, getCheckCount(recheckHeaders)) - if uncheckedHeadersErr != nil { - logrus.Errorf("error fetching missing headers: %s", uncheckedHeadersErr) - return uncheckedHeadersErr - } - - if len(uncheckedHeaders) < 1 { - return ErrNoUncheckedHeaders - } - - for _, header := range uncheckedHeaders { - logs, fetchLogsErr := extractor.Fetcher.FetchLogs(extractor.Addresses, extractor.Topics, header) - if fetchLogsErr != nil { - logError("error fetching logs for header: %s", fetchLogsErr, header) - return fetchLogsErr - } - - if len(logs) > 0 { - transactionsSyncErr := extractor.Syncer.SyncTransactions(header.ID, logs) - if transactionsSyncErr != nil { - logError("error syncing transactions: %s", transactionsSyncErr, header) - return transactionsSyncErr - } - - createLogsErr := extractor.LogRepository.CreateHeaderSyncLogs(header.ID, logs) - if createLogsErr != nil { - logError("error persisting logs: %s", createLogsErr, header) - return createLogsErr - } - } - - markHeaderCheckedErr := extractor.CheckedHeadersRepository.MarkHeaderChecked(header.ID) - if markHeaderCheckedErr != nil { - logError("error marking header checked: %s", markHeaderCheckedErr, header) - return markHeaderCheckedErr - } - } - return nil -} - -func earlierStartingBlockNumber(transformerBlock, watcherBlock int64) bool { - return transformerBlock < watcherBlock -} - -func logError(description string, err error, header core.Header) { - logrus.WithFields(logrus.Fields{ - "headerId": header.ID, - "headerHash": 
header.Hash, - "blockNumber": header.BlockNumber, - }).Errorf(description, err.Error()) -} - -func getCheckCount(recheckHeaders constants.TransformerExecution) int64 { - if recheckHeaders == constants.HeaderUnchecked { - return 1 - } - return constants.RecheckHeaderCap -} - -func (extractor *LogExtractor) updateCheckedHeaders(config transformer.EventTransformerConfig) error { - alreadyWatchingLog, watchingLogErr := extractor.CheckedLogsRepository.AlreadyWatchingLog(config.ContractAddresses, config.Topic) - if watchingLogErr != nil { - return watchingLogErr - } - if !alreadyWatchingLog { - uncheckHeadersErr := extractor.CheckedHeadersRepository.MarkHeadersUnchecked(config.StartingBlockNumber) - if uncheckHeadersErr != nil { - return uncheckHeadersErr - } - markLogWatchedErr := extractor.CheckedLogsRepository.MarkLogWatched(config.ContractAddresses, config.Topic) - if markLogWatchedErr != nil { - return markLogWatchedErr - } - } - return nil -} diff --git a/libraries/shared/logs/extractor_test.go b/libraries/shared/logs/extractor_test.go deleted file mode 100644 index 09971383..00000000 --- a/libraries/shared/logs/extractor_test.go +++ /dev/null @@ -1,415 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package logs_test - -import ( - "math/rand" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/constants" - "github.com/vulcanize/vulcanizedb/libraries/shared/logs" - "github.com/vulcanize/vulcanizedb/libraries/shared/mocks" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" -) - -var _ = Describe("Log extractor", func() { - var ( - checkedHeadersRepository *fakes.MockCheckedHeadersRepository - checkedLogsRepository *fakes.MockCheckedLogsRepository - extractor *logs.LogExtractor - ) - - BeforeEach(func() { - checkedHeadersRepository = &fakes.MockCheckedHeadersRepository{} - checkedLogsRepository = &fakes.MockCheckedLogsRepository{} - extractor = &logs.LogExtractor{ - CheckedHeadersRepository: checkedHeadersRepository, - CheckedLogsRepository: checkedLogsRepository, - Fetcher: &mocks.MockLogFetcher{}, - LogRepository: &fakes.MockHeaderSyncLogRepository{}, - Syncer: &fakes.MockTransactionSyncer{}, - } - }) - - Describe("AddTransformerConfig", func() { - It("updates extractor's starting block number to earliest available", func() { - earlierStartingBlockNumber := rand.Int63() - laterStartingBlockNumber := earlierStartingBlockNumber + 1 - - errOne := extractor.AddTransformerConfig(getTransformerConfig(laterStartingBlockNumber)) - Expect(errOne).NotTo(HaveOccurred()) - errTwo := extractor.AddTransformerConfig(getTransformerConfig(earlierStartingBlockNumber)) - Expect(errTwo).NotTo(HaveOccurred()) - - Expect(*extractor.StartingBlock).To(Equal(earlierStartingBlockNumber)) - }) - - It("adds transformer's addresses to extractor's watched addresses", func() { - addresses := []string{"0xA", "0xB"} - configWithAddresses := transformer.EventTransformerConfig{ - ContractAddresses: addresses, - 
StartingBlockNumber: rand.Int63(), - } - - err := extractor.AddTransformerConfig(configWithAddresses) - - Expect(err).NotTo(HaveOccurred()) - expectedAddresses := transformer.HexStringsToAddresses(addresses) - Expect(extractor.Addresses).To(Equal(expectedAddresses)) - }) - - It("adds transformer's topic to extractor's watched topics", func() { - topic := "0x1" - configWithTopic := transformer.EventTransformerConfig{ - ContractAddresses: []string{fakes.FakeAddress.Hex()}, - Topic: topic, - StartingBlockNumber: rand.Int63(), - } - - err := extractor.AddTransformerConfig(configWithTopic) - - Expect(err).NotTo(HaveOccurred()) - Expect(extractor.Topics).To(Equal([]common.Hash{common.HexToHash(topic)})) - }) - - It("returns error if checking whether log has been checked returns error", func() { - checkedLogsRepository.AlreadyWatchingLogError = fakes.FakeError - - err := extractor.AddTransformerConfig(getTransformerConfig(rand.Int63())) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - - Describe("when log has previously been checked", func() { - It("does not mark any headers unchecked", func() { - checkedLogsRepository.AlreadyWatchingLogReturn = true - - err := extractor.AddTransformerConfig(getTransformerConfig(rand.Int63())) - - Expect(err).NotTo(HaveOccurred()) - Expect(checkedHeadersRepository.MarkHeadersUncheckedCalled).To(BeFalse()) - }) - }) - - Describe("when log has not previously been checked", func() { - BeforeEach(func() { - checkedLogsRepository.AlreadyWatchingLogReturn = false - }) - - It("marks headers since transformer's starting block number as unchecked", func() { - blockNumber := rand.Int63() - - err := extractor.AddTransformerConfig(getTransformerConfig(blockNumber)) - - Expect(err).NotTo(HaveOccurred()) - Expect(checkedHeadersRepository.MarkHeadersUncheckedCalled).To(BeTrue()) - Expect(checkedHeadersRepository.MarkHeadersUncheckedStartingBlockNumber).To(Equal(blockNumber)) - }) - - It("returns error if marking 
headers unchecked returns error", func() { - checkedHeadersRepository.MarkHeadersUncheckedReturnError = fakes.FakeError - - err := extractor.AddTransformerConfig(getTransformerConfig(rand.Int63())) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - - It("persists that tranformer's log has been checked", func() { - config := getTransformerConfig(rand.Int63()) - - err := extractor.AddTransformerConfig(config) - - Expect(err).NotTo(HaveOccurred()) - Expect(checkedLogsRepository.MarkLogWatchedAddresses).To(Equal(config.ContractAddresses)) - Expect(checkedLogsRepository.MarkLogWatchedTopicZero).To(Equal(config.Topic)) - }) - - It("returns error if marking logs checked returns error", func() { - checkedLogsRepository.MarkLogWatchedError = fakes.FakeError - - err := extractor.AddTransformerConfig(getTransformerConfig(rand.Int63())) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - }) - }) - - Describe("ExtractLogs", func() { - It("returns error if no watched addresses configured", func() { - err := extractor.ExtractLogs(constants.HeaderUnchecked) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(logs.ErrNoWatchedAddresses)) - }) - - Describe("when checking unchecked headers", func() { - It("gets headers since configured starting block with check_count < 1", func() { - mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} - mockCheckedHeadersRepository.UncheckedHeadersReturnHeaders = []core.Header{{}} - extractor.CheckedHeadersRepository = mockCheckedHeadersRepository - startingBlockNumber := rand.Int63() - extractor.AddTransformerConfig(getTransformerConfig(startingBlockNumber)) - - err := extractor.ExtractLogs(constants.HeaderUnchecked) - - Expect(err).NotTo(HaveOccurred()) - Expect(mockCheckedHeadersRepository.UncheckedHeadersStartingBlockNumber).To(Equal(startingBlockNumber)) - 
Expect(mockCheckedHeadersRepository.UncheckedHeadersEndingBlockNumber).To(Equal(int64(-1))) - Expect(mockCheckedHeadersRepository.UncheckedHeadersCheckCount).To(Equal(int64(1))) - }) - }) - - Describe("when rechecking headers", func() { - It("gets headers since configured starting block with check_count < RecheckHeaderCap", func() { - mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} - mockCheckedHeadersRepository.UncheckedHeadersReturnHeaders = []core.Header{{}} - extractor.CheckedHeadersRepository = mockCheckedHeadersRepository - startingBlockNumber := rand.Int63() - extractor.AddTransformerConfig(getTransformerConfig(startingBlockNumber)) - - err := extractor.ExtractLogs(constants.HeaderRecheck) - - Expect(err).NotTo(HaveOccurred()) - Expect(mockCheckedHeadersRepository.UncheckedHeadersStartingBlockNumber).To(Equal(startingBlockNumber)) - Expect(mockCheckedHeadersRepository.UncheckedHeadersEndingBlockNumber).To(Equal(int64(-1))) - Expect(mockCheckedHeadersRepository.UncheckedHeadersCheckCount).To(Equal(constants.RecheckHeaderCap)) - }) - }) - - It("returns error if getting unchecked headers fails", func() { - addTransformerConfig(extractor) - mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} - mockCheckedHeadersRepository.UncheckedHeadersReturnError = fakes.FakeError - extractor.CheckedHeadersRepository = mockCheckedHeadersRepository - - err := extractor.ExtractLogs(constants.HeaderUnchecked) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - - Describe("when no unchecked headers", func() { - It("does not fetch logs", func() { - addTransformerConfig(extractor) - mockLogFetcher := &mocks.MockLogFetcher{} - extractor.Fetcher = mockLogFetcher - - _ = extractor.ExtractLogs(constants.HeaderUnchecked) - - Expect(mockLogFetcher.FetchCalled).To(BeFalse()) - }) - - It("returns error that no unchecked headers were found", func() { - addTransformerConfig(extractor) - mockLogFetcher := 
&mocks.MockLogFetcher{} - extractor.Fetcher = mockLogFetcher - - err := extractor.ExtractLogs(constants.HeaderUnchecked) - - Expect(err).To(MatchError(logs.ErrNoUncheckedHeaders)) - }) - }) - - Describe("when there are unchecked headers", func() { - It("fetches logs for unchecked headers", func() { - addUncheckedHeader(extractor) - config := transformer.EventTransformerConfig{ - ContractAddresses: []string{fakes.FakeAddress.Hex()}, - Topic: fakes.FakeHash.Hex(), - StartingBlockNumber: rand.Int63(), - } - addTransformerErr := extractor.AddTransformerConfig(config) - Expect(addTransformerErr).NotTo(HaveOccurred()) - mockLogFetcher := &mocks.MockLogFetcher{} - extractor.Fetcher = mockLogFetcher - - err := extractor.ExtractLogs(constants.HeaderUnchecked) - - Expect(err).NotTo(HaveOccurred()) - Expect(mockLogFetcher.FetchCalled).To(BeTrue()) - expectedTopics := []common.Hash{common.HexToHash(config.Topic)} - Expect(mockLogFetcher.Topics).To(Equal(expectedTopics)) - expectedAddresses := transformer.HexStringsToAddresses(config.ContractAddresses) - Expect(mockLogFetcher.ContractAddresses).To(Equal(expectedAddresses)) - }) - - It("returns error if fetching logs fails", func() { - addUncheckedHeader(extractor) - addTransformerConfig(extractor) - mockLogFetcher := &mocks.MockLogFetcher{} - mockLogFetcher.ReturnError = fakes.FakeError - extractor.Fetcher = mockLogFetcher - - err := extractor.ExtractLogs(constants.HeaderUnchecked) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - - Describe("when no fetched logs", func() { - It("does not sync transactions", func() { - addUncheckedHeader(extractor) - addTransformerConfig(extractor) - mockTransactionSyncer := &fakes.MockTransactionSyncer{} - extractor.Syncer = mockTransactionSyncer - - err := extractor.ExtractLogs(constants.HeaderUnchecked) - - Expect(err).NotTo(HaveOccurred()) - Expect(mockTransactionSyncer.SyncTransactionsCalled).To(BeFalse()) - }) - }) - - Describe("when there are fetched 
logs", func() { - It("syncs transactions", func() { - addUncheckedHeader(extractor) - addFetchedLog(extractor) - addTransformerConfig(extractor) - mockTransactionSyncer := &fakes.MockTransactionSyncer{} - extractor.Syncer = mockTransactionSyncer - - err := extractor.ExtractLogs(constants.HeaderUnchecked) - - Expect(err).NotTo(HaveOccurred()) - Expect(mockTransactionSyncer.SyncTransactionsCalled).To(BeTrue()) - }) - - It("returns error if syncing transactions fails", func() { - addUncheckedHeader(extractor) - addFetchedLog(extractor) - addTransformerConfig(extractor) - mockTransactionSyncer := &fakes.MockTransactionSyncer{} - mockTransactionSyncer.SyncTransactionsError = fakes.FakeError - extractor.Syncer = mockTransactionSyncer - - err := extractor.ExtractLogs(constants.HeaderUnchecked) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - - It("persists fetched logs", func() { - addUncheckedHeader(extractor) - addTransformerConfig(extractor) - fakeLogs := []types.Log{{ - Address: common.HexToAddress("0xA"), - Topics: []common.Hash{common.HexToHash("0xA")}, - Data: []byte{}, - Index: 0, - }} - mockLogFetcher := &mocks.MockLogFetcher{ReturnLogs: fakeLogs} - extractor.Fetcher = mockLogFetcher - mockLogRepository := &fakes.MockHeaderSyncLogRepository{} - extractor.LogRepository = mockLogRepository - - err := extractor.ExtractLogs(constants.HeaderUnchecked) - - Expect(err).NotTo(HaveOccurred()) - Expect(mockLogRepository.PassedLogs).To(Equal(fakeLogs)) - }) - - It("returns error if persisting logs fails", func() { - addUncheckedHeader(extractor) - addFetchedLog(extractor) - addTransformerConfig(extractor) - mockLogRepository := &fakes.MockHeaderSyncLogRepository{} - mockLogRepository.CreateError = fakes.FakeError - extractor.LogRepository = mockLogRepository - - err := extractor.ExtractLogs(constants.HeaderUnchecked) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - }) - - It("marks header 
checked", func() { - addFetchedLog(extractor) - addTransformerConfig(extractor) - mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} - headerID := rand.Int63() - mockCheckedHeadersRepository.UncheckedHeadersReturnHeaders = []core.Header{{ID: headerID}} - extractor.CheckedHeadersRepository = mockCheckedHeadersRepository - - err := extractor.ExtractLogs(constants.HeaderUnchecked) - - Expect(err).NotTo(HaveOccurred()) - Expect(mockCheckedHeadersRepository.MarkHeaderCheckedHeaderID).To(Equal(headerID)) - }) - - It("returns error if marking header checked fails", func() { - addFetchedLog(extractor) - addTransformerConfig(extractor) - mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} - mockCheckedHeadersRepository.UncheckedHeadersReturnHeaders = []core.Header{{ID: rand.Int63()}} - mockCheckedHeadersRepository.MarkHeaderCheckedReturnError = fakes.FakeError - extractor.CheckedHeadersRepository = mockCheckedHeadersRepository - - err := extractor.ExtractLogs(constants.HeaderUnchecked) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - - It("returns nil for error if everything succeeds", func() { - addUncheckedHeader(extractor) - addTransformerConfig(extractor) - - err := extractor.ExtractLogs(constants.HeaderUnchecked) - - Expect(err).NotTo(HaveOccurred()) - }) - }) - }) -}) - -func addTransformerConfig(extractor *logs.LogExtractor) { - fakeConfig := transformer.EventTransformerConfig{ - ContractAddresses: []string{fakes.FakeAddress.Hex()}, - Topic: fakes.FakeHash.Hex(), - StartingBlockNumber: rand.Int63(), - } - extractor.AddTransformerConfig(fakeConfig) -} - -func addUncheckedHeader(extractor *logs.LogExtractor) { - mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} - mockCheckedHeadersRepository.UncheckedHeadersReturnHeaders = []core.Header{{}} - extractor.CheckedHeadersRepository = mockCheckedHeadersRepository -} - -func addFetchedLog(extractor *logs.LogExtractor) { - 
mockLogFetcher := &mocks.MockLogFetcher{} - mockLogFetcher.ReturnLogs = []types.Log{{}} - extractor.Fetcher = mockLogFetcher -} - -func getTransformerConfig(startingBlockNumber int64) transformer.EventTransformerConfig { - return transformer.EventTransformerConfig{ - ContractAddresses: []string{fakes.FakeAddress.Hex()}, - Topic: fakes.FakeHash.Hex(), - StartingBlockNumber: startingBlockNumber, - } -} diff --git a/libraries/shared/logs/logs_suite_test.go b/libraries/shared/logs/logs_suite_test.go deleted file mode 100644 index 9a04f74f..00000000 --- a/libraries/shared/logs/logs_suite_test.go +++ /dev/null @@ -1,36 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package logs_test - -import ( - "io/ioutil" - "testing" - - "github.com/sirupsen/logrus" - - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" -) - -func TestLogs(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Logs Suite") -} - -var _ = BeforeSuite(func() { - logrus.SetOutput(ioutil.Discard) -}) diff --git a/libraries/shared/mocks/backfiller.go b/libraries/shared/mocks/backfiller.go deleted file mode 100644 index 81a09e06..00000000 --- a/libraries/shared/mocks/backfiller.go +++ /dev/null @@ -1,58 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "errors" - - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" -) - -// BackFiller mock for tests -type BackFiller struct { - StorageDiffsToReturn []utils.StorageDiffInput - BackFillErrs []error - PassedEndingBlock uint64 -} - -// SetStorageDiffsToReturn for tests -func (backFiller *BackFiller) SetStorageDiffsToReturn(diffs []utils.StorageDiffInput) { - backFiller.StorageDiffsToReturn = diffs -} - -// BackFill mock method -func (backFiller *BackFiller) BackFill(startingBlock, endingBlock uint64, backFill chan utils.StorageDiffInput, errChan chan error, done chan bool) error { - if endingBlock < startingBlock { - return errors.New("backfill: ending block number needs to be greater than starting block number") - } - backFiller.PassedEndingBlock = endingBlock - go func(backFill chan utils.StorageDiffInput, errChan chan error, done chan bool) { - errLen := len(backFiller.BackFillErrs) - for i, diff := range backFiller.StorageDiffsToReturn { - if i < errLen { - err := backFiller.BackFillErrs[i] - if err != nil { - errChan <- err - continue - } - } - backFill <- diff - } - done <- true - }(backFill, errChan, done) - return nil -} diff --git a/libraries/shared/mocks/event_converter.go b/libraries/shared/mocks/event_converter.go deleted file mode 100644 index 41172add..00000000 --- a/libraries/shared/mocks/event_converter.go +++ /dev/null @@ -1,44 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package mocks - -import ( - "github.com/vulcanize/vulcanizedb/libraries/shared/factories/event" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type MockConverter struct { - ToModelsError error - ContractAbi string - LogsToConvert []core.HeaderSyncLog - ModelsToReturn []event.InsertionModel - PassedContractAddresses []string - SetDBCalled bool - ToModelsCalledCounter int -} - -func (converter *MockConverter) ToModels(abi string, logs []core.HeaderSyncLog) ([]event.InsertionModel, error) { - converter.LogsToConvert = logs - converter.ContractAbi = abi - converter.ToModelsCalledCounter = converter.ToModelsCalledCounter + 1 - return converter.ModelsToReturn, converter.ToModelsError -} - -func (converter *MockConverter) SetDB(db *postgres.DB) { - converter.SetDBCalled = true -} diff --git a/libraries/shared/mocks/event_repository.go b/libraries/shared/mocks/event_repository.go deleted file mode 100644 index c9f4cfb7..00000000 --- a/libraries/shared/mocks/event_repository.go +++ /dev/null @@ -1,44 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "github.com/vulcanize/vulcanizedb/libraries/shared/factories/event" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type MockEventRepository struct { - createError error - PassedModels []event.InsertionModel - SetDbCalled bool - CreateCalledCounter int -} - -func (repository *MockEventRepository) Create(models []event.InsertionModel) error { - repository.PassedModels = models - repository.CreateCalledCounter++ - - return repository.createError -} - -func (repository *MockEventRepository) SetDB(db *postgres.DB) { - repository.SetDbCalled = true -} - -func (repository *MockEventRepository) SetCreateError(e error) { - repository.createError = e -} diff --git a/libraries/shared/mocks/event_transformer.go b/libraries/shared/mocks/event_transformer.go deleted file mode 100644 index 44eb3e1b..00000000 --- a/libraries/shared/mocks/event_transformer.go +++ /dev/null @@ -1,58 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type MockEventTransformer struct { - ExecuteWasCalled bool - ExecuteError error - PassedLogs []core.HeaderSyncLog - config transformer.EventTransformerConfig -} - -func (t *MockEventTransformer) Execute(logs []core.HeaderSyncLog) error { - if t.ExecuteError != nil { - return t.ExecuteError - } - t.ExecuteWasCalled = true - t.PassedLogs = logs - return nil -} - -func (t *MockEventTransformer) GetConfig() transformer.EventTransformerConfig { - return t.config -} - -func (t *MockEventTransformer) SetTransformerConfig(config transformer.EventTransformerConfig) { - t.config = config -} - -func (t *MockEventTransformer) FakeTransformerInitializer(db *postgres.DB) transformer.EventTransformer { - return t -} - -var FakeTransformerConfig = transformer.EventTransformerConfig{ - TransformerName: "FakeTransformer", - ContractAddresses: []string{fakes.FakeAddress.Hex()}, - Topic: fakes.FakeHash.Hex(), -} diff --git a/libraries/shared/mocks/log_delegator.go b/libraries/shared/mocks/log_delegator.go deleted file mode 100644 index b627877d..00000000 --- a/libraries/shared/mocks/log_delegator.go +++ /dev/null @@ -1,45 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package mocks - -import ( - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" -) - -type MockLogDelegator struct { - AddedTransformers []transformer.EventTransformer - DelegateCallCount int - DelegateErrors []error -} - -func (delegator *MockLogDelegator) AddTransformer(t transformer.EventTransformer) { - delegator.AddedTransformers = append(delegator.AddedTransformers, t) -} - -func (delegator *MockLogDelegator) DelegateLogs() error { - delegator.DelegateCallCount++ - if len(delegator.DelegateErrors) > 1 { - var delegateErrorThisRun error - delegateErrorThisRun, delegator.DelegateErrors = delegator.DelegateErrors[0], delegator.DelegateErrors[1:] - return delegateErrorThisRun - } else if len(delegator.DelegateErrors) == 1 { - thisErr := delegator.DelegateErrors[0] - delegator.DelegateErrors = []error{} - return thisErr - } - return nil -} diff --git a/libraries/shared/mocks/log_extractor.go b/libraries/shared/mocks/log_extractor.go deleted file mode 100644 index 8a7be9bc..00000000 --- a/libraries/shared/mocks/log_extractor.go +++ /dev/null @@ -1,48 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "github.com/vulcanize/vulcanizedb/libraries/shared/constants" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" -) - -type MockLogExtractor struct { - AddedConfigs []transformer.EventTransformerConfig - AddTransformerConfigError error - ExtractLogsCount int - ExtractLogsErrors []error -} - -func (extractor *MockLogExtractor) AddTransformerConfig(config transformer.EventTransformerConfig) error { - extractor.AddedConfigs = append(extractor.AddedConfigs, config) - return extractor.AddTransformerConfigError -} - -func (extractor *MockLogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution) error { - extractor.ExtractLogsCount++ - if len(extractor.ExtractLogsErrors) > 1 { - var errorThisRun error - errorThisRun, extractor.ExtractLogsErrors = extractor.ExtractLogsErrors[0], extractor.ExtractLogsErrors[1:] - return errorThisRun - } else if len(extractor.ExtractLogsErrors) == 1 { - thisErr := extractor.ExtractLogsErrors[0] - extractor.ExtractLogsErrors = []error{} - return thisErr - } - return nil -} diff --git a/libraries/shared/mocks/log_fetcher.go b/libraries/shared/mocks/log_fetcher.go deleted file mode 100644 index bb01fec3..00000000 --- a/libraries/shared/mocks/log_fetcher.go +++ /dev/null @@ -1,40 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type MockLogFetcher struct { - ContractAddresses []common.Address - FetchCalled bool - MissingHeader core.Header - ReturnError error - ReturnLogs []types.Log - Topics []common.Hash -} - -func (fetcher *MockLogFetcher) FetchLogs(contractAddresses []common.Address, topics []common.Hash, missingHeader core.Header) ([]types.Log, error) { - fetcher.FetchCalled = true - fetcher.ContractAddresses = contractAddresses - fetcher.Topics = topics - fetcher.MissingHeader = missingHeader - return fetcher.ReturnLogs, fetcher.ReturnError -} diff --git a/libraries/shared/mocks/state_diff_fetcher.go b/libraries/shared/mocks/state_diff_fetcher.go deleted file mode 100644 index 03a38dc8..00000000 --- a/libraries/shared/mocks/state_diff_fetcher.go +++ /dev/null @@ -1,50 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "errors" - "sync/atomic" - - "github.com/ethereum/go-ethereum/statediff" -) - -// StateDiffFetcher mock for tests -type StateDiffFetcher struct { - PayloadsToReturn map[uint64]statediff.Payload - FetchErrs map[uint64]error - CalledAtBlockHeights [][]uint64 - CalledTimes int64 -} - -// FetchStateDiffsAt mock method -func (fetcher *StateDiffFetcher) FetchStateDiffsAt(blockHeights []uint64) ([]statediff.Payload, error) { - if fetcher.PayloadsToReturn == nil { - return nil, errors.New("mock StateDiffFetcher needs to be initialized with payloads to return") - } - atomic.AddInt64(&fetcher.CalledTimes, 1) // thread-safe increment - fetcher.CalledAtBlockHeights = append(fetcher.CalledAtBlockHeights, blockHeights) - results := make([]statediff.Payload, 0, len(blockHeights)) - for _, height := range blockHeights { - results = append(results, fetcher.PayloadsToReturn[height]) - err, ok := fetcher.FetchErrs[height] - if ok && err != nil { - return nil, err - } - } - return results, nil -} diff --git a/libraries/shared/mocks/statediff_streamer.go b/libraries/shared/mocks/statediff_streamer.go deleted file mode 100644 index cd387ee6..00000000 --- a/libraries/shared/mocks/statediff_streamer.go +++ /dev/null @@ -1,43 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "github.com/ethereum/go-ethereum/rpc" - "github.com/ethereum/go-ethereum/statediff" -) - -// StateDiffStreamer is the underlying struct for the Streamer interface -type StateDiffStreamer struct { - PassedPayloadChan chan statediff.Payload - ReturnSub *rpc.ClientSubscription - ReturnErr error - StreamPayloads []statediff.Payload -} - -// Stream is the main loop for subscribing to data from the Geth state diff process -func (sds *StateDiffStreamer) Stream(payloadChan chan statediff.Payload) (*rpc.ClientSubscription, error) { - sds.PassedPayloadChan = payloadChan - - go func() { - for _, payload := range sds.StreamPayloads { - sds.PassedPayloadChan <- payload - } - }() - - return sds.ReturnSub, sds.ReturnErr -} diff --git a/libraries/shared/mocks/storage_fetcher.go b/libraries/shared/mocks/storage_fetcher.go deleted file mode 100644 index 7eb62555..00000000 --- a/libraries/shared/mocks/storage_fetcher.go +++ /dev/null @@ -1,42 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" -) - -// StorageFetcher is a mock fetcher for use in tests with backfilling -type StorageFetcher struct { - DiffsToReturn []utils.StorageDiffInput - ErrsToReturn []error -} - -// NewStorageFetcher returns a new StorageFetcher -func NewStorageFetcher() *StorageFetcher { - return &StorageFetcher{} -} - -// FetchStorageDiffs mock method -func (fetcher *StorageFetcher) FetchStorageDiffs(out chan<- utils.StorageDiffInput, errs chan<- error) { - for _, err := range fetcher.ErrsToReturn { - errs <- err - } - for _, diff := range fetcher.DiffsToReturn { - out <- diff - } -} diff --git a/libraries/shared/mocks/storage_keys_loader.go b/libraries/shared/mocks/storage_keys_loader.go deleted file mode 100644 index 747879ad..00000000 --- a/libraries/shared/mocks/storage_keys_loader.go +++ /dev/null @@ -1,39 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type MockStorageKeysLoader struct { - LoadMappingsCallCount int - LoadMappingsError error - SetDBCalled bool - StorageKeyMappings map[common.Hash]utils.StorageValueMetadata -} - -func (loader *MockStorageKeysLoader) LoadMappings() (map[common.Hash]utils.StorageValueMetadata, error) { - loader.LoadMappingsCallCount++ - return loader.StorageKeyMappings, loader.LoadMappingsError -} - -func (loader *MockStorageKeysLoader) SetDB(db *postgres.DB) { - loader.SetDBCalled = true -} diff --git a/libraries/shared/mocks/storage_keys_lookup.go b/libraries/shared/mocks/storage_keys_lookup.go deleted file mode 100644 index 6c047523..00000000 --- a/libraries/shared/mocks/storage_keys_lookup.go +++ /dev/null @@ -1,38 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type MockStorageKeysLookup struct { - Metadata utils.StorageValueMetadata - LookupCalled bool - LookupErr error -} - -func (mappings *MockStorageKeysLookup) Lookup(key common.Hash) (utils.StorageValueMetadata, error) { - mappings.LookupCalled = true - return mappings.Metadata, mappings.LookupErr -} - -func (*MockStorageKeysLookup) SetDB(db *postgres.DB) { - panic("implement me") -} diff --git a/libraries/shared/mocks/storage_queue.go b/libraries/shared/mocks/storage_queue.go deleted file mode 100644 index c13585dd..00000000 --- a/libraries/shared/mocks/storage_queue.go +++ /dev/null @@ -1,59 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" -) - -// MockStorageQueue for tests -type MockStorageQueue struct { - AddCalled bool - AddError error - AddPassedDiffs []utils.PersistedStorageDiff - DeleteErr error - DeletePassedIds []int64 - GetAllErr error - DiffsToReturn []utils.PersistedStorageDiff - GetAllCalled bool -} - -// Add mock method -func (queue *MockStorageQueue) Add(diff utils.PersistedStorageDiff) error { - queue.AddCalled = true - queue.AddPassedDiffs = append(queue.AddPassedDiffs, diff) - return queue.AddError -} - -// Delete mock method -func (queue *MockStorageQueue) Delete(id int64) error { - queue.DeletePassedIds = append(queue.DeletePassedIds, id) - var diffs []utils.PersistedStorageDiff - for _, diff := range queue.DiffsToReturn { - if diff.ID != id { - diffs = append(diffs, diff) - } - } - queue.DiffsToReturn = diffs - return queue.DeleteErr -} - -// GetAll mock method -func (queue *MockStorageQueue) GetAll() ([]utils.PersistedStorageDiff, error) { - queue.GetAllCalled = true - return queue.DiffsToReturn, queue.GetAllErr -} diff --git a/libraries/shared/mocks/storage_repository.go b/libraries/shared/mocks/storage_repository.go deleted file mode 100644 index 1a4c340d..00000000 --- a/libraries/shared/mocks/storage_repository.go +++ /dev/null @@ -1,40 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package mocks - -import ( - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type MockStorageRepository struct { - CreateErr error - PassedDiffID int64 - PassedMetadata utils.StorageValueMetadata - PassedValue interface{} -} - -func (repository *MockStorageRepository) Create(diffID int64, metadata utils.StorageValueMetadata, value interface{}) error { - repository.PassedDiffID = diffID - repository.PassedMetadata = metadata - repository.PassedValue = value - return repository.CreateErr -} - -func (*MockStorageRepository) SetDB(db *postgres.DB) { - panic("implement me") -} diff --git a/libraries/shared/mocks/storage_transformer.go b/libraries/shared/mocks/storage_transformer.go deleted file mode 100644 index 4f5b7a6b..00000000 --- a/libraries/shared/mocks/storage_transformer.go +++ /dev/null @@ -1,48 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "github.com/ethereum/go-ethereum/common" - - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -// MockStorageTransformer for tests -type MockStorageTransformer struct { - KeccakOfAddress common.Hash - ExecuteErr error - PassedDiffs []utils.PersistedStorageDiff -} - -// Execute mock method -func (transformer *MockStorageTransformer) Execute(diff utils.PersistedStorageDiff) error { - transformer.PassedDiffs = append(transformer.PassedDiffs, diff) - return transformer.ExecuteErr -} - -// KeccakContractAddress mock method -func (transformer *MockStorageTransformer) KeccakContractAddress() common.Hash { - return transformer.KeccakOfAddress -} - -// FakeTransformerInitializer mock method -func (transformer *MockStorageTransformer) FakeTransformerInitializer(db *postgres.DB) transformer.StorageTransformer { - return transformer -} diff --git a/libraries/shared/repository/address_repository.go b/libraries/shared/repository/address_repository.go deleted file mode 100644 index 9e4ce40b..00000000 --- a/libraries/shared/repository/address_repository.go +++ /dev/null @@ -1,75 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize -// -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. -// -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package repository - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/jmoiron/sqlx" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -const getOrCreateAddressQuery = `WITH addressId AS ( - INSERT INTO addresses (address, hashed_address) VALUES ($1, $2) ON CONFLICT DO NOTHING RETURNING id - ) - SELECT id FROM addresses WHERE address = $1 - UNION - SELECT id FROM addressId` - -func GetOrCreateAddress(db *postgres.DB, address string) (int64, error) { - checksumAddress := getChecksumAddress(address) - hashedAddress := utils.HexToKeccak256Hash(checksumAddress).Hex() - - var addressID int64 - getOrCreateErr := db.Get(&addressID, getOrCreateAddressQuery, checksumAddress, hashedAddress) - - return addressID, getOrCreateErr -} - -func GetOrCreateAddressInTransaction(tx *sqlx.Tx, address string) (int64, error) { - checksumAddress := getChecksumAddress(address) - hashedAddress := utils.HexToKeccak256Hash(checksumAddress).Hex() - - var addressID int64 - getOrCreateErr := tx.Get(&addressID, getOrCreateAddressQuery, checksumAddress, hashedAddress) - - return addressID, getOrCreateErr -} - -func GetAddressByID(db *postgres.DB, id int64) (string, error) { - var address string - getErr := db.Get(&address, `SELECT address FROM public.addresses WHERE id = 
$1`, id) - return address, getErr -} - -func getChecksumAddress(address string) string { - stringAddressToCommonAddress := common.HexToAddress(address) - return stringAddressToCommonAddress.Hex() -} diff --git a/libraries/shared/repository/address_repository_test.go b/libraries/shared/repository/address_repository_test.go deleted file mode 100644 index 7c147ee4..00000000 --- a/libraries/shared/repository/address_repository_test.go +++ /dev/null @@ -1,183 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize -// -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. -// -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package repository_test - -import ( - "strings" - - "github.com/jmoiron/sqlx" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/libraries/shared/repository" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/test_config" -) - -var _ = Describe("address lookup", func() { - var ( - db *postgres.DB - address = fakes.FakeAddress.Hex() - ) - BeforeEach(func() { - db = test_config.NewTestDB(test_config.NewTestNode()) - test_config.CleanTestDB(db) - }) - - AfterEach(func() { - test_config.CleanTestDB(db) - }) - - type dbAddress struct { - Id int64 - Address string - HashedAddress string `db:"hashed_address"` - } - - Describe("GetOrCreateAddress", func() { - It("creates an address record", func() { - addressId, createErr := repository.GetOrCreateAddress(db, address) - Expect(createErr).NotTo(HaveOccurred()) - - var actualAddress dbAddress - getErr := db.Get(&actualAddress, `SELECT id, address, hashed_address FROM public.addresses LIMIT 1`) - Expect(getErr).NotTo(HaveOccurred()) - hashedAddress := utils.HexToKeccak256Hash(address).Hex() - expectedAddress := dbAddress{Id: addressId, Address: address, HashedAddress: hashedAddress} - Expect(actualAddress).To(Equal(expectedAddress)) - }) - - It("returns the existing record id if the address already exists", func() { - createId, createErr := repository.GetOrCreateAddress(db, address) - Expect(createErr).NotTo(HaveOccurred()) - - getId, getErr := repository.GetOrCreateAddress(db, address) - Expect(getErr).NotTo(HaveOccurred()) - - var addressCount int - addressErr := db.Get(&addressCount, `SELECT count(*) FROM public.addresses`) - Expect(addressErr).NotTo(HaveOccurred()) - Expect(addressCount).To(Equal(1)) - Expect(createId).To(Equal(getId)) - }) - - It("gets upper-cased addresses", func() { - upperAddress := strings.ToUpper(address) - upperAddressId, createErr := repository.GetOrCreateAddress(db, upperAddress) - 
Expect(createErr).NotTo(HaveOccurred()) - - mixedCaseAddressId, getErr := repository.GetOrCreateAddress(db, address) - Expect(getErr).NotTo(HaveOccurred()) - Expect(upperAddressId).To(Equal(mixedCaseAddressId)) - }) - - It("gets lower-cased addresses", func() { - lowerAddress := strings.ToLower(address) - upperAddressId, createErr := repository.GetOrCreateAddress(db, lowerAddress) - Expect(createErr).NotTo(HaveOccurred()) - - mixedCaseAddressId, getErr := repository.GetOrCreateAddress(db, address) - Expect(getErr).NotTo(HaveOccurred()) - Expect(upperAddressId).To(Equal(mixedCaseAddressId)) - }) - }) - - Describe("GetOrCreateAddressInTransaction", func() { - var ( - tx *sqlx.Tx - txErr error - ) - BeforeEach(func() { - tx, txErr = db.Beginx() - Expect(txErr).NotTo(HaveOccurred()) - }) - - AfterEach(func() { - tx.Rollback() - }) - - It("creates an address record", func() { - addressId, createErr := repository.GetOrCreateAddressInTransaction(tx, address) - Expect(createErr).NotTo(HaveOccurred()) - commitErr := tx.Commit() - Expect(commitErr).NotTo(HaveOccurred()) - - var actualAddress dbAddress - getErr := db.Get(&actualAddress, `SELECT id, address, hashed_address FROM public.addresses LIMIT 1`) - Expect(getErr).NotTo(HaveOccurred()) - hashedAddress := utils.HexToKeccak256Hash(address).Hex() - expectedAddress := dbAddress{Id: addressId, Address: address, HashedAddress: hashedAddress} - Expect(actualAddress).To(Equal(expectedAddress)) - }) - - It("returns the existing record id if the address already exists", func() { - _, createErr := repository.GetOrCreateAddressInTransaction(tx, address) - Expect(createErr).NotTo(HaveOccurred()) - - _, getErr := repository.GetOrCreateAddressInTransaction(tx, address) - Expect(getErr).NotTo(HaveOccurred()) - tx.Commit() - - var addressCount int - addressErr := db.Get(&addressCount, `SELECT count(*) FROM public.addresses`) - Expect(addressErr).NotTo(HaveOccurred()) - }) - - It("gets upper-cased addresses", func() { - upperAddress := 
strings.ToUpper(address) - upperAddressId, createErr := repository.GetOrCreateAddressInTransaction(tx, upperAddress) - Expect(createErr).NotTo(HaveOccurred()) - - mixedCaseAddressId, getErr := repository.GetOrCreateAddressInTransaction(tx, address) - Expect(getErr).NotTo(HaveOccurred()) - tx.Commit() - - Expect(upperAddressId).To(Equal(mixedCaseAddressId)) - }) - - It("gets lower-cased addresses", func() { - lowerAddress := strings.ToLower(address) - upperAddressId, createErr := repository.GetOrCreateAddressInTransaction(tx, lowerAddress) - Expect(createErr).NotTo(HaveOccurred()) - - mixedCaseAddressId, getErr := repository.GetOrCreateAddressInTransaction(tx, address) - Expect(getErr).NotTo(HaveOccurred()) - tx.Commit() - - Expect(upperAddressId).To(Equal(mixedCaseAddressId)) - }) - }) - - Describe("GetAddressByID", func() { - It("gets and address by it's id", func() { - addressId, createErr := repository.GetOrCreateAddress(db, address) - Expect(createErr).NotTo(HaveOccurred()) - - actualAddress, getErr := repository.GetAddressByID(db, addressId) - Expect(getErr).NotTo(HaveOccurred()) - Expect(actualAddress).To(Equal(address)) - }) - - It("returns an error if the id doesn't exist", func() { - _, getErr := repository.GetAddressByID(db, 0) - Expect(getErr).To(HaveOccurred()) - Expect(getErr).To(MatchError("sql: no rows in result set")) - }) - }) -}) diff --git a/libraries/shared/repository/repository_suite_test.go b/libraries/shared/repository/repository_suite_test.go deleted file mode 100644 index 60b20bc1..00000000 --- a/libraries/shared/repository/repository_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package repository_test - -import ( - "io/ioutil" - "testing" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - log "github.com/sirupsen/logrus" -) - -func TestFactories(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Shared Repository Suite") -} - -var _ = BeforeSuite(func() { - log.SetOutput(ioutil.Discard) -}) diff --git a/libraries/shared/storage/backfiller.go b/libraries/shared/storage/backfiller.go deleted file mode 100644 index 42bf8c98..00000000 --- a/libraries/shared/storage/backfiller.go +++ /dev/null @@ -1,150 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package storage - -import ( - "fmt" - "sync/atomic" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/rlp" - "github.com/ethereum/go-ethereum/statediff" - "github.com/sirupsen/logrus" - - "github.com/vulcanize/vulcanizedb/libraries/shared/fetcher" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/libraries/shared/utilities" -) - -const ( - DefaultMaxBatchSize uint64 = 100 - defaultMaxBatchNumber int64 = 10 -) - -// BackFiller is the backfilling interface -type BackFiller interface { - BackFill(startingBlock, endingBlock uint64, backFill chan utils.StorageDiffInput, errChan chan error, done chan bool) error -} - -// backFiller is the backfilling struct -type backFiller struct { - fetcher fetcher.StateDiffFetcher - batchSize uint64 - startingBlock uint64 -} - -// NewStorageBackFiller returns a BackFiller -func NewStorageBackFiller(fetcher fetcher.StateDiffFetcher, batchSize uint64) BackFiller { - if batchSize == 0 { - batchSize = DefaultMaxBatchSize - } - return &backFiller{ - fetcher: fetcher, - batchSize: batchSize, - } -} - -// BackFill fetches, processes, and returns utils.StorageDiffs over a range of blocks -// It splits a large range up into smaller chunks, batch fetching and processing those chunks concurrently -func (bf *backFiller) BackFill(startingBlock, endingBlock uint64, backFill chan utils.StorageDiffInput, errChan chan error, done chan bool) error { - logrus.Infof("going to fill in gap from %d to %d", startingBlock, endingBlock) - - // break the range up into bins of smaller ranges - blockRangeBins, err := utilities.GetBlockHeightBins(startingBlock, endingBlock, bf.batchSize) - if err != nil { - return err - } - // int64 for atomic incrementing and decrementing to track the number of active processing goroutines we have - var activeCount int64 - // channel for processing goroutines to signal when they are done - processingDone := make(chan [2]uint64) - forwardDone := 
make(chan bool) - - // for each block range bin spin up a goroutine to batch fetch and process state diffs in that range - go func() { - for _, blockHeights := range blockRangeBins { - // if we have reached our limit of active goroutines - // wait for one to finish before starting the next - if atomic.AddInt64(&activeCount, 1) > defaultMaxBatchNumber { - // this blocks until a process signals it has finished - <-forwardDone - } - go bf.backFillRange(blockHeights, backFill, errChan, processingDone) - } - }() - - // goroutine that listens on the processingDone chan - // keeps track of the number of processing goroutines that have finished - // when they have all finished, sends the final signal out - go func() { - goroutinesFinished := 0 - for { - select { - case doneWithHeights := <-processingDone: - atomic.AddInt64(&activeCount, -1) - select { - // if we are waiting for a process to finish, signal that one has - case forwardDone <- true: - default: - } - logrus.Infof("finished fetching gap sub-bin from %d to %d", doneWithHeights[0], doneWithHeights[1]) - goroutinesFinished++ - if goroutinesFinished >= len(blockRangeBins) { - done <- true - return - } - } - } - }() - - return nil -} - -func (bf *backFiller) backFillRange(blockHeights []uint64, diffChan chan utils.StorageDiffInput, errChan chan error, doneChan chan [2]uint64) { - payloads, fetchErr := bf.fetcher.FetchStateDiffsAt(blockHeights) - if fetchErr != nil { - errChan <- fetchErr - } - for _, payload := range payloads { - stateDiff := new(statediff.StateObject) - stateDiffDecodeErr := rlp.DecodeBytes(payload.StateObjectRlp, stateDiff) - if stateDiffDecodeErr != nil { - errChan <- stateDiffDecodeErr - continue - } - accounts := utils.GetAccountsFromDiff(*stateDiff) - for _, account := range accounts { - logrus.Trace(fmt.Sprintf("iterating through %d Storage values on account with key %s", len(account.StorageNodes), common.BytesToHash(account.LeafKey).Hex())) - for _, storage := range account.StorageNodes { - 
diff, formatErr := utils.FromGethStateDiff(account, stateDiff, storage) - if formatErr != nil { - logrus.Error("failed to format utils.StorageDiff from storage with key: ", common.BytesToHash(storage.LeafKey), "from account with key: ", common.BytesToHash(account.LeafKey)) - errChan <- formatErr - continue - } - logrus.Trace("adding storage diff to results", - "keccak of address: ", diff.HashedAddress.Hex(), - "block height: ", diff.BlockHeight, - "storage key: ", diff.StorageKey.Hex(), - "storage value: ", diff.StorageValue.Hex()) - diffChan <- diff - } - } - } - // when this is done, send out a signal - doneChan <- [2]uint64{blockHeights[0], blockHeights[len(blockHeights)-1]} -} diff --git a/libraries/shared/storage/backfiller_test.go b/libraries/shared/storage/backfiller_test.go deleted file mode 100644 index 75a14c40..00000000 --- a/libraries/shared/storage/backfiller_test.go +++ /dev/null @@ -1,237 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package storage_test - -import ( - "errors" - - "github.com/ethereum/go-ethereum/statediff" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/libraries/shared/mocks" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/libraries/shared/test_data" -) - -var _ = Describe("BackFiller", func() { - Describe("BackFill", func() { - var ( - mockFetcher *mocks.StateDiffFetcher - backFiller storage.BackFiller - ) - BeforeEach(func() { - mockFetcher = new(mocks.StateDiffFetcher) - mockFetcher.PayloadsToReturn = map[uint64]statediff.Payload{ - test_data.BlockNumber.Uint64(): test_data.MockStatediffPayload, - test_data.BlockNumber2.Uint64(): test_data.MockStatediffPayload2, - } - }) - - It("batch calls statediff_stateDiffAt", func() { - backFiller = storage.NewStorageBackFiller(mockFetcher, 100) - backFill := make(chan utils.StorageDiffInput) - done := make(chan bool) - errChan := make(chan error) - backFillInitErr := backFiller.BackFill( - test_data.BlockNumber.Uint64(), - test_data.BlockNumber2.Uint64(), - backFill, - errChan, - done) - Expect(backFillInitErr).ToNot(HaveOccurred()) - var diffs []utils.StorageDiffInput - for { - select { - case diff := <-backFill: - diffs = append(diffs, diff) - continue - case err := <-errChan: - Expect(err).ToNot(HaveOccurred()) - continue - case <-done: - break - } - break - } - Expect(mockFetcher.CalledTimes).To(Equal(int64(1))) - Expect(len(diffs)).To(Equal(4)) - Expect(containsDiff(diffs, test_data.CreatedExpectedStorageDiff)).To(BeTrue()) - Expect(containsDiff(diffs, test_data.UpdatedExpectedStorageDiff)).To(BeTrue()) - Expect(containsDiff(diffs, test_data.DeletedExpectedStorageDiff)).To(BeTrue()) - Expect(containsDiff(diffs, test_data.UpdatedExpectedStorageDiff2)).To(BeTrue()) - }) - - It("has a configurable batch size", func() { - backFiller = storage.NewStorageBackFiller(mockFetcher, 1) - backFill := make(chan utils.StorageDiffInput) - done := make(chan bool) - errChan := make(chan error) - 
backFillInitErr := backFiller.BackFill( - test_data.BlockNumber.Uint64(), - test_data.BlockNumber2.Uint64(), - backFill, - errChan, - done) - Expect(backFillInitErr).ToNot(HaveOccurred()) - var diffs []utils.StorageDiffInput - for { - select { - case diff := <-backFill: - diffs = append(diffs, diff) - continue - case err := <-errChan: - Expect(err).ToNot(HaveOccurred()) - continue - case <-done: - break - } - break - } - Expect(mockFetcher.CalledTimes).To(Equal(int64(2))) - Expect(len(diffs)).To(Equal(4)) - Expect(containsDiff(diffs, test_data.CreatedExpectedStorageDiff)).To(BeTrue()) - Expect(containsDiff(diffs, test_data.UpdatedExpectedStorageDiff)).To(BeTrue()) - Expect(containsDiff(diffs, test_data.DeletedExpectedStorageDiff)).To(BeTrue()) - Expect(containsDiff(diffs, test_data.UpdatedExpectedStorageDiff2)).To(BeTrue()) - }) - - It("handles bin numbers in excess of the goroutine limit (100)", func() { - payloadsToReturn := make(map[uint64]statediff.Payload, 1001) - for i := test_data.BlockNumber.Uint64(); i <= test_data.BlockNumber.Uint64()+1000; i++ { - payloadsToReturn[i] = test_data.MockStatediffPayload - } - mockFetcher.PayloadsToReturn = payloadsToReturn - // batch size of 2 with 1001 block range => 501 bins - backFiller = storage.NewStorageBackFiller(mockFetcher, 2) - backFill := make(chan utils.StorageDiffInput) - done := make(chan bool) - errChan := make(chan error) - backFillInitErr := backFiller.BackFill( - test_data.BlockNumber.Uint64(), - test_data.BlockNumber.Uint64()+1000, - backFill, - errChan, - done) - Expect(backFillInitErr).ToNot(HaveOccurred()) - var diffs []utils.StorageDiffInput - for { - select { - case diff := <-backFill: - diffs = append(diffs, diff) - continue - case err := <-errChan: - Expect(err).ToNot(HaveOccurred()) - continue - case <-done: - break - } - break - } - Expect(mockFetcher.CalledTimes).To(Equal(int64(501))) - Expect(len(diffs)).To(Equal(3003)) - Expect(containsDiff(diffs, 
test_data.CreatedExpectedStorageDiff)).To(BeTrue()) - Expect(containsDiff(diffs, test_data.UpdatedExpectedStorageDiff)).To(BeTrue()) - Expect(containsDiff(diffs, test_data.DeletedExpectedStorageDiff)).To(BeTrue()) - }) - - It("passes fetcher errors forward", func() { - mockFetcher.FetchErrs = map[uint64]error{ - test_data.BlockNumber.Uint64(): errors.New("mock fetcher error"), - } - backFiller = storage.NewStorageBackFiller(mockFetcher, 1) - backFill := make(chan utils.StorageDiffInput) - done := make(chan bool) - errChan := make(chan error) - backFillInitErr := backFiller.BackFill( - test_data.BlockNumber.Uint64(), - test_data.BlockNumber2.Uint64(), - backFill, - errChan, - done) - Expect(backFillInitErr).ToNot(HaveOccurred()) - var numOfErrs int - var diffs []utils.StorageDiffInput - for { - select { - case diff := <-backFill: - diffs = append(diffs, diff) - continue - case err := <-errChan: - Expect(err).To(HaveOccurred()) - Expect(err.Error()).To(Equal("mock fetcher error")) - numOfErrs++ - continue - case <-done: - break - } - break - } - Expect(mockFetcher.CalledTimes).To(Equal(int64(2))) - Expect(numOfErrs).To(Equal(1)) - Expect(len(diffs)).To(Equal(1)) - Expect(containsDiff(diffs, test_data.UpdatedExpectedStorageDiff2)).To(BeTrue()) - - mockFetcher.FetchErrs = map[uint64]error{ - test_data.BlockNumber.Uint64(): errors.New("mock fetcher error"), - test_data.BlockNumber2.Uint64(): errors.New("mock fetcher error"), - } - mockFetcher.CalledTimes = 0 - backFiller = storage.NewStorageBackFiller(mockFetcher, 1) - backFill = make(chan utils.StorageDiffInput) - done = make(chan bool) - errChan = make(chan error) - backFillInitErr = backFiller.BackFill( - test_data.BlockNumber.Uint64(), - test_data.BlockNumber2.Uint64(), - backFill, - errChan, - done) - Expect(backFillInitErr).ToNot(HaveOccurred()) - numOfErrs = 0 - diffs = []utils.StorageDiffInput{} - for { - select { - case diff := <-backFill: - diffs = append(diffs, diff) - continue - case err := <-errChan: - 
Expect(err).To(HaveOccurred()) - Expect(err.Error()).To(Equal("mock fetcher error")) - numOfErrs++ - continue - case <-done: - break - } - break - } - Expect(mockFetcher.CalledTimes).To(Equal(int64(2))) - Expect(numOfErrs).To(Equal(2)) - Expect(len(diffs)).To(Equal(0)) - }) - }) -}) - -func containsDiff(diffs []utils.StorageDiffInput, diff utils.StorageDiffInput) bool { - for _, d := range diffs { - if d == diff { - return true - } - } - return false -} diff --git a/libraries/shared/storage/storage_queue.go b/libraries/shared/storage/storage_queue.go deleted file mode 100644 index f7102170..00000000 --- a/libraries/shared/storage/storage_queue.go +++ /dev/null @@ -1,55 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package storage - -import ( - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type IStorageQueue interface { - Add(diff utils.PersistedStorageDiff) error - Delete(id int64) error - GetAll() ([]utils.PersistedStorageDiff, error) -} - -type StorageQueue struct { - db *postgres.DB -} - -func NewStorageQueue(db *postgres.DB) StorageQueue { - return StorageQueue{db: db} -} - -func (queue StorageQueue) Add(diff utils.PersistedStorageDiff) error { - _, err := queue.db.Exec(`INSERT INTO public.queued_storage (diff_id) VALUES - ($1) ON CONFLICT DO NOTHING`, diff.ID) - return err -} - -func (queue StorageQueue) Delete(diffID int64) error { - _, err := queue.db.Exec(`DELETE FROM public.queued_storage WHERE diff_id = $1`, diffID) - return err -} - -func (queue StorageQueue) GetAll() ([]utils.PersistedStorageDiff, error) { - var result []utils.PersistedStorageDiff - err := queue.db.Select(&result, `SELECT storage_diff.id, hashed_address, block_height, block_hash, storage_key, storage_value - FROM public.queued_storage - LEFT JOIN public.storage_diff ON queued_storage.diff_id = storage_diff.id`) - return result, err -} diff --git a/libraries/shared/storage/storage_queue_test.go b/libraries/shared/storage/storage_queue_test.go deleted file mode 100644 index 4656429f..00000000 --- a/libraries/shared/storage/storage_queue_test.go +++ /dev/null @@ -1,124 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package storage_test - -import ( - "github.com/ethereum/go-ethereum/common" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/test_config" -) - -var _ = Describe("Storage queue", func() { - var ( - db *postgres.DB - diff utils.PersistedStorageDiff - diffRepository repositories.StorageDiffRepository - queue storage.IStorageQueue - ) - - BeforeEach(func() { - fakeAddr := "0x123456" - rawDiff := utils.StorageDiffInput{ - HashedAddress: utils.HexToKeccak256Hash(fakeAddr), - BlockHash: common.HexToHash("0x678901"), - BlockHeight: 987, - StorageKey: common.HexToHash("0x654321"), - StorageValue: common.HexToHash("0x198765"), - } - db = test_config.NewTestDB(test_config.NewTestNode()) - test_config.CleanTestDB(db) - diffRepository = repositories.NewStorageDiffRepository(db) - diffID, insertDiffErr := diffRepository.CreateStorageDiff(rawDiff) - Expect(insertDiffErr).NotTo(HaveOccurred()) - diff = utils.ToPersistedDiff(rawDiff, diffID) - queue = storage.NewStorageQueue(db) - addErr := queue.Add(diff) - Expect(addErr).NotTo(HaveOccurred()) - }) - - Describe("Add", func() { - It("adds a storage diff to the db", func() { - var result utils.PersistedStorageDiff - getErr := db.Get(&result, `SELECT storage_diff.id, hashed_address, block_hash, block_height, storage_key, storage_value - FROM public.queued_storage - LEFT JOIN public.storage_diff ON queued_storage.diff_id = storage_diff.id`) - Expect(getErr).NotTo(HaveOccurred()) - Expect(result).To(Equal(diff)) - }) - - It("does not duplicate storage 
diffs", func() { - addErr := queue.Add(diff) - Expect(addErr).NotTo(HaveOccurred()) - var count int - getErr := db.Get(&count, `SELECT count(*) FROM public.queued_storage`) - Expect(getErr).NotTo(HaveOccurred()) - Expect(count).To(Equal(1)) - }) - }) - - It("deletes storage diff from db", func() { - diffs, getErr := queue.GetAll() - Expect(getErr).NotTo(HaveOccurred()) - Expect(len(diffs)).To(Equal(1)) - - err := queue.Delete(diffs[0].ID) - - Expect(err).NotTo(HaveOccurred()) - remainingRows, secondGetErr := queue.GetAll() - Expect(secondGetErr).NotTo(HaveOccurred()) - Expect(len(remainingRows)).To(BeZero()) - }) - - It("gets all storage diffs from db", func() { - fakeAddr := "0x234567" - diffTwo := utils.StorageDiffInput{ - HashedAddress: utils.HexToKeccak256Hash(fakeAddr), - BlockHash: common.HexToHash("0x678902"), - BlockHeight: 988, - StorageKey: common.HexToHash("0x654322"), - StorageValue: common.HexToHash("0x198766"), - } - persistedDiffTwoID, insertDiffErr := diffRepository.CreateStorageDiff(diffTwo) - Expect(insertDiffErr).NotTo(HaveOccurred()) - persistedDiffTwo := utils.ToPersistedDiff(diffTwo, persistedDiffTwoID) - addErr := queue.Add(persistedDiffTwo) - Expect(addErr).NotTo(HaveOccurred()) - - diffs, err := queue.GetAll() - - Expect(err).NotTo(HaveOccurred()) - Expect(len(diffs)).To(Equal(2)) - Expect(diffs[0]).NotTo(Equal(diffs[1])) - Expect(diffs[0].ID).NotTo(BeZero()) - Expect(diffs[0].HashedAddress).To(Or(Equal(diff.HashedAddress), Equal(diffTwo.HashedAddress))) - Expect(diffs[0].BlockHash).To(Or(Equal(diff.BlockHash), Equal(diffTwo.BlockHash))) - Expect(diffs[0].BlockHeight).To(Or(Equal(diff.BlockHeight), Equal(diffTwo.BlockHeight))) - Expect(diffs[0].StorageKey).To(Or(Equal(diff.StorageKey), Equal(diffTwo.StorageKey))) - Expect(diffs[0].StorageValue).To(Or(Equal(diff.StorageValue), Equal(diffTwo.StorageValue))) - Expect(diffs[1].ID).NotTo(BeZero()) - Expect(diffs[1].HashedAddress).To(Or(Equal(diff.HashedAddress), Equal(diffTwo.HashedAddress))) - 
Expect(diffs[1].BlockHash).To(Or(Equal(diff.BlockHash), Equal(diffTwo.BlockHash))) - Expect(diffs[1].BlockHeight).To(Or(Equal(diff.BlockHeight), Equal(diffTwo.BlockHeight))) - Expect(diffs[1].StorageKey).To(Or(Equal(diff.StorageKey), Equal(diffTwo.StorageKey))) - Expect(diffs[1].StorageValue).To(Or(Equal(diff.StorageValue), Equal(diffTwo.StorageValue))) - }) -}) diff --git a/libraries/shared/storage/storage_suite_test.go b/libraries/shared/storage/storage_suite_test.go deleted file mode 100644 index cf4f9997..00000000 --- a/libraries/shared/storage/storage_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package storage_test - -import ( - "io/ioutil" - "testing" - - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - log "github.com/sirupsen/logrus" -) - -func TestFactories(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Shared Storage Suite") -} - -var _ = BeforeSuite(func() { - log.SetOutput(ioutil.Discard) -}) diff --git a/libraries/shared/storage/utils/decoder.go b/libraries/shared/storage/utils/decoder.go deleted file mode 100644 index ff0273fb..00000000 --- a/libraries/shared/storage/utils/decoder.go +++ /dev/null @@ -1,106 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package utils - -import ( - "fmt" - "math/big" - - "github.com/ethereum/go-ethereum/common" -) - -const ( - bitsPerByte = 8 -) - -func Decode(diff PersistedStorageDiff, metadata StorageValueMetadata) (interface{}, error) { - switch metadata.Type { - case Uint256: - return decodeInteger(diff.StorageValue.Bytes()), nil - case Uint48: - return decodeInteger(diff.StorageValue.Bytes()), nil - case Uint128: - return decodeInteger(diff.StorageValue.Bytes()), nil - case Address: - return decodeAddress(diff.StorageValue.Bytes()), nil - case Bytes32: - return diff.StorageValue.Hex(), nil - case PackedSlot: - return decodePackedSlot(diff.StorageValue.Bytes(), metadata.PackedTypes), nil - default: - panic(fmt.Sprintf("can't decode unknown type: %d", metadata.Type)) - } -} - -func decodeInteger(raw []byte) string { - n := big.NewInt(0).SetBytes(raw) - return n.String() -} - -func decodeAddress(raw []byte) string { - return common.BytesToAddress(raw).Hex() -} - -func decodePackedSlot(raw []byte, packedTypes map[int]ValueType) map[int]string { - storageSlotData := raw - decodedStorageSlotItems := map[int]string{} - numberOfTypes := len(packedTypes) - - for position := 0; position < numberOfTypes; position++ { - //get length of remaining storage date - lengthOfStorageData := len(storageSlotData) - - //get item details (type, length, starting index, value bytes) - itemType := packedTypes[position] - lengthOfItem := getNumberOfBytes(itemType) - itemStartingIndex := lengthOfStorageData - lengthOfItem - itemValueBytes := storageSlotData[itemStartingIndex:] - - //decode item's bytes and set in results map - decodedValue := decodeIndividualItem(itemValueBytes, itemType) - decodedStorageSlotItems[position] = decodedValue - - //pop last item off raw slot data before moving on - storageSlotData = storageSlotData[0:itemStartingIndex] - } - - return decodedStorageSlotItems -} - -func decodeIndividualItem(itemBytes []byte, valueType ValueType) string { - switch valueType { - case Uint48, 
Uint128: - return decodeInteger(itemBytes) - case Address: - return decodeAddress(itemBytes) - default: - panic(fmt.Sprintf("can't decode unknown type: %d", valueType)) - } -} - -func getNumberOfBytes(valueType ValueType) int { - switch valueType { - case Uint48: - return 48 / bitsPerByte - case Uint128: - return 128 / bitsPerByte - case Address: - return 20 - default: - panic(fmt.Sprintf("ValueType %d not recognized", valueType)) - } -} diff --git a/libraries/shared/storage/utils/decoder_test.go b/libraries/shared/storage/utils/decoder_test.go deleted file mode 100644 index 87a602d1..00000000 --- a/libraries/shared/storage/utils/decoder_test.go +++ /dev/null @@ -1,174 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package utils_test - -import ( - "math/big" - - "github.com/ethereum/go-ethereum/common" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" -) - -var _ = Describe("Storage decoder", func() { - It("decodes uint256", func() { - fakeInt := common.HexToHash("0000000000000000000000000000000000000000000000000000000000000539") - diff := utils.PersistedStorageDiff{StorageDiffInput: utils.StorageDiffInput{StorageValue: fakeInt}} - metadata := utils.StorageValueMetadata{Type: utils.Uint256} - - result, err := utils.Decode(diff, metadata) - - Expect(err).NotTo(HaveOccurred()) - Expect(result).To(Equal(big.NewInt(0).SetBytes(fakeInt.Bytes()).String())) - }) - - It("decodes uint128", func() { - fakeInt := common.HexToHash("0000000000000000000000000000000000000000000000000000000000011123") - diff := utils.PersistedStorageDiff{StorageDiffInput: utils.StorageDiffInput{StorageValue: fakeInt}} - metadata := utils.StorageValueMetadata{Type: utils.Uint128} - - result, err := utils.Decode(diff, metadata) - - Expect(err).NotTo(HaveOccurred()) - Expect(result).To(Equal(big.NewInt(0).SetBytes(fakeInt.Bytes()).String())) - }) - - It("decodes uint48", func() { - fakeInt := common.HexToHash("0000000000000000000000000000000000000000000000000000000000000123") - diff := utils.PersistedStorageDiff{StorageDiffInput: utils.StorageDiffInput{StorageValue: fakeInt}} - metadata := utils.StorageValueMetadata{Type: utils.Uint48} - - result, err := utils.Decode(diff, metadata) - - Expect(err).NotTo(HaveOccurred()) - Expect(result).To(Equal(big.NewInt(0).SetBytes(fakeInt.Bytes()).String())) - }) - - It("decodes address", func() { - fakeAddress := common.HexToAddress("0x12345") - diff := utils.PersistedStorageDiff{StorageDiffInput: utils.StorageDiffInput{StorageValue: fakeAddress.Hash()}} - metadata := utils.StorageValueMetadata{Type: utils.Address} - - result, err := utils.Decode(diff, metadata) - - Expect(err).NotTo(HaveOccurred()) - Expect(result).To(Equal(fakeAddress.Hex())) - }) - - Describe("when there are multiple items packed in the storage 
slot", func() { - It("decodes uint48 items", func() { - //this is a real storage data example - packedStorage := common.HexToHash("000000000000000000000000000000000000000000000002a300000000002a30") - diff := utils.PersistedStorageDiff{StorageDiffInput: utils.StorageDiffInput{StorageValue: packedStorage}} - packedTypes := map[int]utils.ValueType{} - packedTypes[0] = utils.Uint48 - packedTypes[1] = utils.Uint48 - - metadata := utils.StorageValueMetadata{ - Type: utils.PackedSlot, - PackedTypes: packedTypes, - } - - result, err := utils.Decode(diff, metadata) - decodedValues := result.(map[int]string) - - Expect(err).NotTo(HaveOccurred()) - Expect(decodedValues[0]).To(Equal(big.NewInt(0).SetBytes(common.HexToHash("2a30").Bytes()).String())) - Expect(decodedValues[1]).To(Equal(big.NewInt(0).SetBytes(common.HexToHash("2a300").Bytes()).String())) - }) - - It("decodes 5 uint48 items", func() { - //TODO: this packedStorageHex was generated by hand, it would be nice to test this against - //real storage data that has several items packed into it - packedStorageHex := "0000000A5D1AFFFFFFFFFFFE00000009F3C600000002A300000000002A30" - - packedStorage := common.HexToHash(packedStorageHex) - diff := utils.PersistedStorageDiff{StorageDiffInput: utils.StorageDiffInput{StorageValue: packedStorage}} - packedTypes := map[int]utils.ValueType{} - packedTypes[0] = utils.Uint48 - packedTypes[1] = utils.Uint48 - packedTypes[2] = utils.Uint48 - packedTypes[3] = utils.Uint48 - packedTypes[4] = utils.Uint48 - - metadata := utils.StorageValueMetadata{ - Type: utils.PackedSlot, - PackedTypes: packedTypes, - } - - result, err := utils.Decode(diff, metadata) - decodedValues := result.(map[int]string) - - Expect(err).NotTo(HaveOccurred()) - Expect(decodedValues[0]).To(Equal(big.NewInt(0).SetBytes(common.HexToHash("2a30").Bytes()).String())) - Expect(decodedValues[1]).To(Equal(big.NewInt(0).SetBytes(common.HexToHash("2a300").Bytes()).String())) - 
Expect(decodedValues[2]).To(Equal(big.NewInt(0).SetBytes(common.HexToHash("9F3C6").Bytes()).String())) - Expect(decodedValues[3]).To(Equal(big.NewInt(0).SetBytes(common.HexToHash("FFFFFFFFFFFE").Bytes()).String())) - Expect(decodedValues[4]).To(Equal(big.NewInt(0).SetBytes(common.HexToHash("A5D1A").Bytes()).String())) - }) - - It("decodes 2 uint128 items", func() { - //TODO: this packedStorageHex was generated by hand, it would be nice to test this against - //real storage data that has several items packed into it - packedStorageHex := "000000038D7EA4C67FF8E502B6730000" + - "0000000000000000AB54A98CEB1F0AD2" - packedStorage := common.HexToHash(packedStorageHex) - diff := utils.PersistedStorageDiff{StorageDiffInput: utils.StorageDiffInput{StorageValue: packedStorage}} - packedTypes := map[int]utils.ValueType{} - packedTypes[0] = utils.Uint128 - packedTypes[1] = utils.Uint128 - - metadata := utils.StorageValueMetadata{ - Type: utils.PackedSlot, - PackedTypes: packedTypes, - } - - result, err := utils.Decode(diff, metadata) - decodedValues := result.(map[int]string) - - Expect(err).NotTo(HaveOccurred()) - Expect(decodedValues[0]).To(Equal(big.NewInt(0).SetBytes(common.HexToHash("AB54A98CEB1F0AD2").Bytes()).String())) - Expect(decodedValues[1]).To(Equal(big.NewInt(0).SetBytes(common.HexToHash("38D7EA4C67FF8E502B6730000").Bytes()).String())) - }) - - It("decodes address + 2 uint48s", func() { - //TODO: replace with real data when available - addressHex := "0000000000000000000000000000000000012345" - packedStorage := common.HexToHash("00000002a300" + "000000002a30" + addressHex) - row := utils.PersistedStorageDiff{StorageDiffInput: utils.StorageDiffInput{StorageValue: packedStorage}} - packedTypes := map[int]utils.ValueType{} - packedTypes[0] = utils.Address - packedTypes[1] = utils.Uint48 - packedTypes[2] = utils.Uint48 - - metadata := utils.StorageValueMetadata{ - Type: utils.PackedSlot, - PackedTypes: packedTypes, - } - - result, err := utils.Decode(row, metadata) - 
decodedValues := result.(map[int]string) - - Expect(err).NotTo(HaveOccurred()) - Expect(decodedValues[0]).To(Equal("0x" + addressHex)) - Expect(decodedValues[1]).To(Equal(big.NewInt(0).SetBytes(common.HexToHash("2a30").Bytes()).String())) - Expect(decodedValues[2]).To(Equal(big.NewInt(0).SetBytes(common.HexToHash("2a300").Bytes()).String())) - }) - }) -}) diff --git a/libraries/shared/storage/utils/diff.go b/libraries/shared/storage/utils/diff.go deleted file mode 100644 index 9010dd64..00000000 --- a/libraries/shared/storage/utils/diff.go +++ /dev/null @@ -1,89 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package utils - -import ( - "strconv" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/crypto" - "github.com/ethereum/go-ethereum/rlp" - "github.com/ethereum/go-ethereum/statediff" -) - -const ExpectedRowLength = 5 - -type StorageDiffInput struct { - HashedAddress common.Hash `db:"hashed_address"` - BlockHash common.Hash `db:"block_hash"` - BlockHeight int `db:"block_height"` - StorageKey common.Hash `db:"storage_key"` - StorageValue common.Hash `db:"storage_value"` -} - -type PersistedStorageDiff struct { - StorageDiffInput - ID int64 -} - -func FromParityCsvRow(csvRow []string) (StorageDiffInput, error) { - if len(csvRow) != ExpectedRowLength { - return StorageDiffInput{}, ErrRowMalformed{Length: len(csvRow)} - } - height, err := strconv.Atoi(csvRow[2]) - if err != nil { - return StorageDiffInput{}, err - } - return StorageDiffInput{ - HashedAddress: HexToKeccak256Hash(csvRow[0]), - BlockHash: common.HexToHash(csvRow[1]), - BlockHeight: height, - StorageKey: common.HexToHash(csvRow[3]), - StorageValue: common.HexToHash(csvRow[4]), - }, nil -} - -func FromGethStateDiff(account statediff.StateNode, stateDiff *statediff.StateObject, storage statediff.StorageNode) (StorageDiffInput, error) { - var decodedValue []byte - err := rlp.DecodeBytes(storage.NodeValue, &decodedValue) - if err != nil { - return StorageDiffInput{}, err - } - - return StorageDiffInput{ - HashedAddress: common.BytesToHash(account.LeafKey), - BlockHash: stateDiff.BlockHash, - BlockHeight: int(stateDiff.BlockNumber.Int64()), - StorageKey: common.BytesToHash(storage.LeafKey), - StorageValue: common.BytesToHash(decodedValue), - }, nil -} - -func ToPersistedDiff(raw StorageDiffInput, id int64) PersistedStorageDiff { - return PersistedStorageDiff{ - StorageDiffInput: raw, - ID: id, - } -} - -func HexToKeccak256Hash(hex string) common.Hash { - return crypto.Keccak256Hash(common.FromHex(hex)) -} - -func GetAccountsFromDiff(stateDiff statediff.StateObject) 
[]statediff.StateNode { - return stateDiff.Nodes -} diff --git a/libraries/shared/storage/utils/diff_test.go b/libraries/shared/storage/utils/diff_test.go deleted file mode 100644 index 93ad31f8..00000000 --- a/libraries/shared/storage/utils/diff_test.go +++ /dev/null @@ -1,124 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package utils_test - -import ( - "math/big" - "math/rand" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/rlp" - "github.com/ethereum/go-ethereum/statediff" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/libraries/shared/test_data" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" -) - -var _ = Describe("Storage row parsing", func() { - Describe("FromParityCsvRow", func() { - It("converts an array of strings to a row struct", func() { - contract := "0x123" - blockHash := "0x456" - blockHeight := "789" - storageKey := "0x987" - storageValue := "0x654" - data := []string{contract, blockHash, blockHeight, storageKey, storageValue} - - result, err := utils.FromParityCsvRow(data) - - Expect(err).NotTo(HaveOccurred()) - expectedKeccakOfContractAddress := utils.HexToKeccak256Hash(contract) - Expect(result.HashedAddress).To(Equal(expectedKeccakOfContractAddress)) - Expect(result.BlockHash).To(Equal(common.HexToHash(blockHash))) - Expect(result.BlockHeight).To(Equal(789)) - Expect(result.StorageKey).To(Equal(common.HexToHash(storageKey))) - Expect(result.StorageValue).To(Equal(common.HexToHash(storageValue))) - }) - - It("returns an error if row is missing data", func() { - _, err := utils.FromParityCsvRow([]string{"0x123"}) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(utils.ErrRowMalformed{Length: 1})) - }) - - It("returns error if block height malformed", func() { - _, err := utils.FromParityCsvRow([]string{"", "", "", "", ""}) - - Expect(err).To(HaveOccurred()) - }) - }) - - Describe("FromGethStateDiff", func() { - var ( - accountDiff = statediff.StateNode{LeafKey: []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 0}} - stateDiff = &statediff.StateObject{ - BlockNumber: big.NewInt(rand.Int63()), - BlockHash: fakes.FakeHash, - } - ) - - It("adds relevant fields to diff", func() { - storageValueBytes := []byte{3} - storageValueRlp, encodeErr := rlp.EncodeToBytes(storageValueBytes) - Expect(encodeErr).NotTo(HaveOccurred()) - - storageDiff := statediff.StorageNode{ - LeafKey: []byte{0, 9, 8, 7, 6, 5, 4, 3, 2, 1}, - NodeValue: storageValueRlp, - 
NodeType: statediff.Leaf, - } - - result, err := utils.FromGethStateDiff(accountDiff, stateDiff, storageDiff) - Expect(err).NotTo(HaveOccurred()) - - expectedAddress := common.BytesToHash(accountDiff.LeafKey) - Expect(result.HashedAddress).To(Equal(expectedAddress)) - Expect(result.BlockHash).To(Equal(fakes.FakeHash)) - expectedBlockHeight := int(stateDiff.BlockNumber.Int64()) - Expect(result.BlockHeight).To(Equal(expectedBlockHeight)) - expectedStorageKey := common.BytesToHash(storageDiff.LeafKey) - Expect(result.StorageKey).To(Equal(expectedStorageKey)) - expectedStorageValue := common.BytesToHash(storageValueBytes) - Expect(result.StorageValue).To(Equal(expectedStorageValue)) - }) - - It("handles decoding large storage values from their RLP", func() { - storageValueBytes := []byte{1, 2, 3, 4, 5, 0, 9, 8, 7, 6} - storageValueRlp, encodeErr := rlp.EncodeToBytes(storageValueBytes) - Expect(encodeErr).NotTo(HaveOccurred()) - - storageDiff := statediff.StorageNode{ - LeafKey: []byte{0, 9, 8, 7, 6, 5, 4, 3, 2, 1}, - NodeValue: storageValueRlp, - NodeType: statediff.Leaf, - } - - result, err := utils.FromGethStateDiff(accountDiff, stateDiff, storageDiff) - Expect(err).NotTo(HaveOccurred()) - Expect(result.StorageValue).To(Equal(common.BytesToHash(storageValueBytes))) - }) - - It("returns an err if decoding the storage value Rlp fails", func() { - _, err := utils.FromGethStateDiff(accountDiff, stateDiff, test_data.StorageWithBadValue) - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError("rlp: input contains more than one value")) - }) - }) -}) diff --git a/libraries/shared/storage/utils/errors.go b/libraries/shared/storage/utils/errors.go deleted file mode 100644 index 0c87c642..00000000 --- a/libraries/shared/storage/utils/errors.go +++ /dev/null @@ -1,53 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by 
-// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package utils - -import ( - "fmt" -) - -type ErrContractNotFound struct { - Contract string -} - -func (e ErrContractNotFound) Error() string { - return fmt.Sprintf("transformer not found for contract: %s", e.Contract) -} - -type ErrMetadataMalformed struct { - MissingData Key -} - -func (e ErrMetadataMalformed) Error() string { - return fmt.Sprintf("storage metadata malformed: missing %s", e.MissingData) -} - -type ErrRowMalformed struct { - Length int -} - -func (e ErrRowMalformed) Error() string { - return fmt.Sprintf("storage row malformed: length %d, expected %d", e.Length, ExpectedRowLength) -} - -type ErrStorageKeyNotFound struct { - Key string -} - -func (e ErrStorageKeyNotFound) Error() string { - return fmt.Sprintf("unknown storage key: %s", e.Key) -} diff --git a/libraries/shared/storage/utils/keys_loader.go b/libraries/shared/storage/utils/keys_loader.go deleted file mode 100644 index e1252643..00000000 --- a/libraries/shared/storage/utils/keys_loader.go +++ /dev/null @@ -1,55 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package utils - -import ( - "math/big" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/crypto" -) - -const ( - IndexZero = "0000000000000000000000000000000000000000000000000000000000000000" - IndexOne = "0000000000000000000000000000000000000000000000000000000000000001" - IndexTwo = "0000000000000000000000000000000000000000000000000000000000000002" - IndexThree = "0000000000000000000000000000000000000000000000000000000000000003" - IndexFour = "0000000000000000000000000000000000000000000000000000000000000004" - IndexFive = "0000000000000000000000000000000000000000000000000000000000000005" - IndexSix = "0000000000000000000000000000000000000000000000000000000000000006" - IndexSeven = "0000000000000000000000000000000000000000000000000000000000000007" - IndexEight = "0000000000000000000000000000000000000000000000000000000000000008" - IndexNine = "0000000000000000000000000000000000000000000000000000000000000009" - IndexTen = "000000000000000000000000000000000000000000000000000000000000000a" - IndexEleven = "000000000000000000000000000000000000000000000000000000000000000b" -) - -func GetStorageKeyForMapping(indexOnContract, key string) common.Hash { - keyBytes := common.FromHex(key + indexOnContract) - return crypto.Keccak256Hash(keyBytes) -} - -func GetStorageKeyForNestedMapping(indexOnContract, primaryKey, secondaryKey string) common.Hash { - primaryMappingIndex := crypto.Keccak256(common.FromHex(primaryKey + indexOnContract)) - return crypto.Keccak256Hash(common.FromHex(secondaryKey), primaryMappingIndex) -} - -func GetIncrementedStorageKey(original common.Hash, 
incrementBy int64) common.Hash { - originalMappingAsInt := original.Big() - incremented := big.NewInt(0).Add(originalMappingAsInt, big.NewInt(incrementBy)) - return common.BytesToHash(incremented.Bytes()) -} diff --git a/libraries/shared/storage/utils/keys_loader_test.go b/libraries/shared/storage/utils/keys_loader_test.go deleted file mode 100644 index 51ad58fd..00000000 --- a/libraries/shared/storage/utils/keys_loader_test.go +++ /dev/null @@ -1,91 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package utils_test - -import ( - "github.com/ethereum/go-ethereum/common" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" -) - -var _ = Describe("Storage keys loader utils", func() { - Describe("GetStorageKeyForMapping", func() { - It("returns the storage key for a mapping when passed the mapping's index on the contract and the desired value's key", func() { - // ex. 
solidity: - // mapping (bytes32 => uint) public amounts - // to access amounts, pass in the index of the mapping on the contract + the bytes32 key for the uint val being looked up - indexOfMappingOnContract := utils.IndexZero - keyForDesiredValueInMapping := "1234567890abcdef" - - storageKey := utils.GetStorageKeyForMapping(indexOfMappingOnContract, keyForDesiredValueInMapping) - - expectedStorageKey := common.HexToHash("0xee0c1b59a3856bafbfb8730e7694c4badc271eb5f01ce4a8d7a53d8a6499676f") - Expect(storageKey).To(Equal(expectedStorageKey)) - }) - - It("returns same result if value includes hex prefix", func() { - indexOfMappingOnContract := utils.IndexZero - keyForDesiredValueInMapping := "0x1234567890abcdef" - - storageKey := utils.GetStorageKeyForMapping(indexOfMappingOnContract, keyForDesiredValueInMapping) - - expectedStorageKey := common.HexToHash("0xee0c1b59a3856bafbfb8730e7694c4badc271eb5f01ce4a8d7a53d8a6499676f") - Expect(storageKey).To(Equal(expectedStorageKey)) - }) - }) - - Describe("GetStorageKeyForNestedMapping", func() { - It("returns the storage key for a nested mapping when passed the mapping's index on the contract and the desired value's keys", func() { - // ex. 
solidity: - // mapping (bytes32 => uint) public amounts - // mapping (address => mapping (uint => bytes32)) public addressNames - // to access addressNames, pass in the index of the mapping on the contract + the address and uint keys for the bytes32 val being looked up - indexOfMappingOnContract := utils.IndexOne - keyForOuterMapping := "1234567890abcdef" - keyForInnerMapping := "123" - - storageKey := utils.GetStorageKeyForNestedMapping(indexOfMappingOnContract, keyForOuterMapping, keyForInnerMapping) - - expectedStorageKey := common.HexToHash("0x82113529f6cd61061d1a6f0de53f2bdd067a1addd3d2b46be50a99abfcdb1661") - Expect(storageKey).To(Equal(expectedStorageKey)) - }) - }) - - Describe("GetIncrementedStorageKey", func() { - It("returns the storage key for later values sharing an index on the contract with other earlier values", func() { - // ex. solidity: - // mapping (bytes32 => uint) public amounts - // mapping (address => mapping (uint => bytes32)) public addressNames - // struct Data { - // uint256 quantity; - // uint256 quality; - // } - // mapping (bytes32 => Data) public itemData; - // to access quality from itemData, pass in the storage key for the zero-indexed value (quantity) + the number of increments required. - // (For "quality", we must increment the storage key for the corresponding "quantity" by 1). 
- indexOfMappingOnContract := utils.IndexTwo - keyForDesiredValueInMapping := "1234567890abcdef" - storageKeyForFirstPropertyOnStruct := utils.GetStorageKeyForMapping(indexOfMappingOnContract, keyForDesiredValueInMapping) - - storageKey := utils.GetIncrementedStorageKey(storageKeyForFirstPropertyOnStruct, 1) - - expectedStorageKey := common.HexToHash("0x69b38749f0a8ed5d505c8474f7fb62c7828aad8a7627f1c67e07af1d2368cad4") - Expect(storageKey).To(Equal(expectedStorageKey)) - }) - }) -}) diff --git a/libraries/shared/storage/utils/keys_lookup.go b/libraries/shared/storage/utils/keys_lookup.go deleted file mode 100644 index fd2c1ee8..00000000 --- a/libraries/shared/storage/utils/keys_lookup.go +++ /dev/null @@ -1,37 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package utils - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/crypto" -) - -func AddHashedKeys(currentMappings map[common.Hash]StorageValueMetadata) map[common.Hash]StorageValueMetadata { - copyOfCurrentMappings := make(map[common.Hash]StorageValueMetadata) - for k, v := range currentMappings { - copyOfCurrentMappings[k] = v - } - for k, v := range copyOfCurrentMappings { - currentMappings[hashKey(k)] = v - } - return currentMappings -} - -func hashKey(key common.Hash) common.Hash { - return crypto.Keccak256Hash(key.Bytes()) -} diff --git a/libraries/shared/storage/utils/keys_lookup_test.go b/libraries/shared/storage/utils/keys_lookup_test.go deleted file mode 100644 index 22ca16df..00000000 --- a/libraries/shared/storage/utils/keys_lookup_test.go +++ /dev/null @@ -1,47 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package utils_test - -import ( - "github.com/ethereum/go-ethereum/common" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" -) - -var _ = Describe("Storage keys lookup utils", func() { - Describe("AddHashedKeys", func() { - It("returns a copy of the map with an additional slot for the hashed version of every key", func() { - fakeMap := map[common.Hash]utils.StorageValueMetadata{} - fakeStorageKey := common.HexToHash("72c72de6b203d67cb6cd54fc93300109fcc6fd6eac88e390271a3d548794d800") - var fakeMappingKey utils.Key = "fakeKey" - fakeMetadata := utils.StorageValueMetadata{ - Name: "fakeName", - Keys: map[utils.Key]string{fakeMappingKey: "fakeValue"}, - Type: utils.Uint48, - } - fakeMap[fakeStorageKey] = fakeMetadata - - result := utils.AddHashedKeys(fakeMap) - - Expect(len(result)).To(Equal(2)) - expectedHashedStorageKey := common.HexToHash("2165edb4e1c37b99b60fa510d84f939dd35d5cd1d1c8f299d6456ea09df65a76") - Expect(fakeMap[fakeStorageKey]).To(Equal(fakeMetadata)) - Expect(fakeMap[expectedHashedStorageKey]).To(Equal(fakeMetadata)) - }) - }) -}) diff --git a/libraries/shared/storage/utils/utils_suite_test.go b/libraries/shared/storage/utils/utils_suite_test.go deleted file mode 100644 index 51a53ac6..00000000 --- a/libraries/shared/storage/utils/utils_suite_test.go +++ /dev/null @@ -1,36 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package utils_test - -import ( - "io/ioutil" - "testing" - - "github.com/sirupsen/logrus" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" -) - -func TestShared(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Shared Storage Utils Suite") -} - -var _ = BeforeSuite(func() { - logrus.SetOutput(ioutil.Discard) -}) diff --git a/libraries/shared/storage/utils/value.go b/libraries/shared/storage/utils/value.go deleted file mode 100644 index c658b55f..00000000 --- a/libraries/shared/storage/utils/value.go +++ /dev/null @@ -1,69 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package utils - -import "fmt" - -type ValueType int - -const ( - Uint256 ValueType = iota - Uint48 - Uint128 - Bytes32 - Address - PackedSlot -) - -type Key string - -type StorageValueMetadata struct { - Name string - Keys map[Key]string - Type ValueType - PackedNames map[int]string //zero indexed position in map => name of packed item - PackedTypes map[int]ValueType //zero indexed position in map => type of packed item -} - -func GetStorageValueMetadata(name string, keys map[Key]string, valueType ValueType) StorageValueMetadata { - return getMetadata(name, keys, valueType, nil, nil) -} - -func GetStorageValueMetadataForPackedSlot(name string, keys map[Key]string, valueType ValueType, packedNames map[int]string, packedTypes map[int]ValueType) StorageValueMetadata { - return getMetadata(name, keys, valueType, packedNames, packedTypes) -} - -func getMetadata(name string, keys map[Key]string, valueType ValueType, packedNames map[int]string, packedTypes map[int]ValueType) StorageValueMetadata { - assertPackedSlotArgs(valueType, packedNames, packedTypes) - - return StorageValueMetadata{ - Name: name, - Keys: keys, - Type: valueType, - PackedNames: packedNames, - PackedTypes: packedTypes, - } -} - -func assertPackedSlotArgs(valueType ValueType, packedNames map[int]string, packedTypes map[int]ValueType) { - if valueType == PackedSlot && (packedTypes == nil || packedNames == nil) { - panic(fmt.Sprintf("ValueType is PackedSlot. Expected PackedNames and PackedTypes to not be nil, but got PackedNames = %v and PackedTypes = %v", packedNames, packedTypes)) - } else if (packedNames != nil && packedTypes != nil) && valueType != PackedSlot { - panic(fmt.Sprintf("PackedNames and PackedTypes passed in. 
Expected ValueType to equal PackedSlot (%v), but got %v.", PackedSlot, valueType)) - } - -} diff --git a/libraries/shared/storage/utils/value_test.go b/libraries/shared/storage/utils/value_test.go deleted file mode 100644 index bcfdc324..00000000 --- a/libraries/shared/storage/utils/value_test.go +++ /dev/null @@ -1,78 +0,0 @@ -package utils_test - -import ( - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" -) - -var _ = Describe("Storage value metadata getter", func() { - It("returns storage value metadata for a single storage variable", func() { - metadataName := "fake_name" - metadataKeys := map[utils.Key]string{"key": "value"} - metadataType := utils.Uint256 - - expectedMetadata := utils.StorageValueMetadata{ - Name: metadataName, - Keys: metadataKeys, - Type: metadataType, - } - Expect(utils.GetStorageValueMetadata(metadataName, metadataKeys, metadataType)).To(Equal(expectedMetadata)) - }) - - Describe("metadata for a packed storaged slot", func() { - It("returns metadata for multiple storage variables", func() { - metadataName := "fake_name" - metadataKeys := map[utils.Key]string{"key": "value"} - metadataType := utils.PackedSlot - metadataPackedNames := map[int]string{0: "name"} - metadataPackedTypes := map[int]utils.ValueType{0: utils.Uint48} - - expectedMetadata := utils.StorageValueMetadata{ - Name: metadataName, - Keys: metadataKeys, - Type: metadataType, - PackedTypes: metadataPackedTypes, - PackedNames: metadataPackedNames, - } - Expect(utils.GetStorageValueMetadataForPackedSlot(metadataName, metadataKeys, metadataType, metadataPackedNames, metadataPackedTypes)).To(Equal(expectedMetadata)) - }) - - It("panics if PackedTypes are nil when the type is PackedSlot", func() { - metadataName := "fake_name" - metadataKeys := map[utils.Key]string{"key": "value"} - metadataType := utils.PackedSlot - metadataPackedNames := map[int]string{0: "name"} - - getMetadata := func() { - 
utils.GetStorageValueMetadataForPackedSlot(metadataName, metadataKeys, metadataType, metadataPackedNames, nil) - } - Expect(getMetadata).To(Panic()) - }) - - It("panics if PackedNames are nil when the type is PackedSlot", func() { - metadataName := "fake_name" - metadataKeys := map[utils.Key]string{"key": "value"} - metadataType := utils.PackedSlot - metadataPackedTypes := map[int]utils.ValueType{0: utils.Uint48} - - getMetadata := func() { - utils.GetStorageValueMetadataForPackedSlot(metadataName, metadataKeys, metadataType, nil, metadataPackedTypes) - } - Expect(getMetadata).To(Panic()) - }) - - It("panics if valueType is not PackedSlot if PackedNames is populated", func() { - metadataName := "fake_name" - metadataKeys := map[utils.Key]string{"key": "value"} - metadataType := utils.Uint48 - metadataPackedNames := map[int]string{0: "name"} - metadataPackedTypes := map[int]utils.ValueType{0: utils.Uint48} - - getMetadata := func() { - utils.GetStorageValueMetadataForPackedSlot(metadataName, metadataKeys, metadataType, metadataPackedNames, metadataPackedTypes) - } - Expect(getMetadata).To(Panic()) - }) - }) -}) diff --git a/libraries/shared/streamer/statediff_streamer.go b/libraries/shared/streamer/statediff_streamer.go deleted file mode 100644 index b5e371e0..00000000 --- a/libraries/shared/streamer/statediff_streamer.go +++ /dev/null @@ -1,48 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package streamer - -import ( - "github.com/ethereum/go-ethereum/rpc" - "github.com/ethereum/go-ethereum/statediff" - "github.com/sirupsen/logrus" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -// Streamer is the interface for streaming a statediff subscription -type Streamer interface { - Stream(payloadChan chan statediff.Payload, params statediff.Params) (*rpc.ClientSubscription, error) -} - -// StateDiffStreamer is the underlying struct for the StateDiffStreamer interface -type StateDiffStreamer struct { - Client core.RPCClient -} - -// NewStateDiffStreamer creates a pointer to a new StateDiffStreamer which satisfies the IStateDiffStreamer interface -func NewStateDiffStreamer(client core.RPCClient) Streamer { - return &StateDiffStreamer{ - Client: client, - } -} - -// Stream is the main loop for subscribing to data from the Geth state diff process -func (sds *StateDiffStreamer) Stream(payloadChan chan statediff.Payload, params statediff.Params) (*rpc.ClientSubscription, error) { - logrus.Info("streaming diffs from geth") - return sds.Client.Subscribe("statediff", payloadChan, "stream", params) -} diff --git a/libraries/shared/streamer/statediff_streamer_test.go b/libraries/shared/streamer/statediff_streamer_test.go deleted file mode 100644 index 425a27b7..00000000 --- a/libraries/shared/streamer/statediff_streamer_test.go +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2019 Vulcanize -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package streamer_test - -import ( - "github.com/ethereum/go-ethereum/statediff" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/libraries/shared/streamer" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" -) - -var _ = Describe("StateDiff Streamer", func() { - It("subscribes to the geth statediff service", func() { - client := &fakes.MockRPCClient{} - streamer := streamer.NewStateDiffStreamer(client) - payloadChan := make(chan statediff.Payload) - params := statediff.Params{ - IncludeBlock: true, - IncludeTD: true, - IncludeReceipts: true, - } - _, err := streamer.Stream(payloadChan, params) - Expect(err).NotTo(HaveOccurred()) - - client.AssertSubscribeCalledWith("statediff", payloadChan, []interface{}{"stream", params}) - }) -}) diff --git a/libraries/shared/streamer/streamer_suite_test.go b/libraries/shared/streamer/streamer_suite_test.go deleted file mode 100644 index fec1bc6c..00000000 --- a/libraries/shared/streamer/streamer_suite_test.go +++ /dev/null @@ -1,19 +0,0 @@ -package streamer_test - -import ( - "io/ioutil" - "testing" - - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - "github.com/sirupsen/logrus" -) - -func TestStreamer(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Streamer Suite") -} - -var _ = BeforeSuite(func() { - logrus.SetOutput(ioutil.Discard) -}) diff --git a/libraries/shared/test_data/generic.go b/libraries/shared/test_data/generic.go deleted file mode 100644 index 8a876bca..00000000 --- a/libraries/shared/test_data/generic.go +++ /dev/null @@ -1,75 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package test_data - -import ( - "math/rand" - "time" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/ethereum/go-ethereum/core/types" - "github.com/vulcanize/vulcanizedb/libraries/shared/factories/event" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" -) - -var startingBlockNumber = rand.Int63() -var topic0 = "0x" + randomString(64) - -var GenericTestLog = func() types.Log { - return types.Log{ - Address: fakeAddress(), - Topics: []common.Hash{common.HexToHash(topic0), FakeHash()}, - Data: hexutil.MustDecode(FakeHash().Hex()), - BlockNumber: uint64(startingBlockNumber), - TxHash: FakeHash(), - TxIndex: uint(rand.Int31()), - BlockHash: FakeHash(), - Index: uint(rand.Int31()), - } -} - -var GenericModel = event.InsertionModel{} - -var GenericTestConfig = transformer.EventTransformerConfig{ - TransformerName: "generic-test-transformer", - ContractAddresses: []string{fakeAddress().Hex()}, - ContractAbi: randomString(100), - Topic: topic0, - StartingBlockNumber: startingBlockNumber, - EndingBlockNumber: startingBlockNumber + 1, -} - -func fakeAddress() common.Address { - return common.HexToAddress("0x" + randomString(40)) -} - -func FakeHash() common.Hash { - return common.HexToHash("0x" + randomString(64)) -} - -func randomString(length int) string { - var seededRand = rand.New( - rand.NewSource(time.Now().UnixNano())) - charset := "abcdef1234567890" - b := make([]byte, length) - for i := range b { - b[i] = charset[seededRand.Intn(len(charset))] - } - - return string(b) -} diff --git a/libraries/shared/test_data/statediff.go b/libraries/shared/test_data/statediff.go deleted file mode 100644 index 7a6f2a13..00000000 --- a/libraries/shared/test_data/statediff.go +++ /dev/null @@ -1,179 +0,0 @@ -// Copyright 2018 Vulcanize -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package test_data - -import ( - "math/big" - "math/rand" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/state" - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/crypto" - "github.com/ethereum/go-ethereum/rlp" - "github.com/ethereum/go-ethereum/statediff" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" -) - -var ( - BlockNumber = big.NewInt(rand.Int63()) - BlockNumber2 = big.NewInt(0).Add(BlockNumber, big.NewInt(1)) - BlockHash = "0xfa40fbe2d98d98b3363a778d52f2bcd29d6790b9b3f3cab2b167fd12d3550f73" - BlockHash2 = "0xaa40fbe2d98d98b3363a778d52f2bcd29d6790b9b3f3cab2b167fd12d3550f72" - CodeHash = common.Hex2Bytes("0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470") - NewNonceValue = rand.Uint64() - NewBalanceValue = rand.Int63() - ContractRoot = common.HexToHash("0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") - StoragePath = common.HexToHash("0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470").Bytes() - StorageKey = common.HexToHash("0000000000000000000000000000000000000000000000000000000000000001").Bytes() - SmallStorageValue = common.Hex2Bytes("03") - SmallStorageValueRlp, _ = rlp.EncodeToBytes(SmallStorageValue) - storageWithSmallValue = []statediff.StorageNode{{ - LeafKey: StorageKey, - NodeValue: SmallStorageValueRlp, - NodeType: statediff.Leaf, - Path: StoragePath, - }} - LargeStorageValue = common.Hex2Bytes("00191b53778c567b14b50ba0000") - LargeStorageValueRlp, _ = 
rlp.EncodeToBytes(LargeStorageValue) - storageWithLargeValue = []statediff.StorageNode{{ - LeafKey: StorageKey, - NodeValue: LargeStorageValueRlp, - Path: StoragePath, - NodeType: statediff.Leaf, - }} - StorageWithBadValue = statediff.StorageNode{ - LeafKey: StorageKey, - NodeValue: []byte{0, 1, 2}, - NodeType: statediff.Leaf, - Path: StoragePath, - // this storage value will fail to be decoded as an RLP with the following error message: - // "input contains more than one value" - } - contractAddress = common.HexToAddress("0xaE9BEa628c4Ce503DcFD7E305CaB4e29E7476592") - ContractLeafKey = crypto.Keccak256Hash(contractAddress[:]) - anotherContractAddress = common.HexToAddress("0xaE9BEa628c4Ce503DcFD7E305CaB4e29E7476593") - AnotherContractLeafKey = crypto.Keccak256Hash(anotherContractAddress[:]) - - testAccount = state.Account{ - Nonce: NewNonceValue, - Balance: big.NewInt(NewBalanceValue), - Root: ContractRoot, - CodeHash: CodeHash, - } - valueBytes, _ = rlp.EncodeToBytes(testAccount) - CreatedAccountDiffs = []statediff.StateNode{ - { - LeafKey: ContractLeafKey.Bytes(), - NodeValue: valueBytes, - StorageNodes: storageWithSmallValue, - }, - } - - UpdatedAccountDiffs = []statediff.StateNode{{ - LeafKey: AnotherContractLeafKey.Bytes(), - NodeValue: valueBytes, - StorageNodes: storageWithLargeValue, - }} - UpdatedAccountDiffs2 = []statediff.StateNode{{ - LeafKey: AnotherContractLeafKey.Bytes(), - NodeValue: valueBytes, - StorageNodes: storageWithSmallValue, - }} - - DeletedAccountDiffs = []statediff.StateNode{{ - LeafKey: AnotherContractLeafKey.Bytes(), - NodeValue: valueBytes, - StorageNodes: storageWithSmallValue, - }} - - MockStateDiff = statediff.StateObject{ - BlockNumber: BlockNumber, - BlockHash: common.HexToHash(BlockHash), - Nodes: append(append(CreatedAccountDiffs, UpdatedAccountDiffs...), DeletedAccountDiffs...), - } - MockStateDiff2 = statediff.StateObject{ - BlockNumber: BlockNumber2, - BlockHash: common.HexToHash(BlockHash2), - Nodes: UpdatedAccountDiffs2, - 
} - MockStateDiffBytes, _ = rlp.EncodeToBytes(MockStateDiff) - MockStateDiff2Bytes, _ = rlp.EncodeToBytes(MockStateDiff2) - - mockTransaction1 = types.NewTransaction(0, common.HexToAddress("0x0"), big.NewInt(1000), 50, big.NewInt(100), nil) - mockTransaction2 = types.NewTransaction(1, common.HexToAddress("0x1"), big.NewInt(2000), 100, big.NewInt(200), nil) - MockTransactions = types.Transactions{mockTransaction1, mockTransaction2} - - mockReceipt1 = types.NewReceipt(common.HexToHash("0x0").Bytes(), false, 50) - mockReceipt2 = types.NewReceipt(common.HexToHash("0x1").Bytes(), false, 100) - MockReceipts = types.Receipts{mockReceipt1, mockReceipt2} - - MockHeader = types.Header{ - Time: 0, - Number: BlockNumber, - Root: common.HexToHash("0x0"), - TxHash: common.HexToHash("0x0"), - ReceiptHash: common.HexToHash("0x0"), - } - MockHeader2 = types.Header{ - Time: 0, - Number: BlockNumber2, - Root: common.HexToHash("0x1"), - TxHash: common.HexToHash("0x1"), - ReceiptHash: common.HexToHash("0x1"), - } - MockBlock = types.NewBlock(&MockHeader, MockTransactions, nil, MockReceipts) - MockBlock2 = types.NewBlock(&MockHeader2, MockTransactions, nil, MockReceipts) - MockBlockRlp, _ = rlp.EncodeToBytes(MockBlock) - MockBlockRlp2, _ = rlp.EncodeToBytes(MockBlock2) - - MockStatediffPayload = statediff.Payload{ - BlockRlp: MockBlockRlp, - StateObjectRlp: MockStateDiffBytes, - } - MockStatediffPayload2 = statediff.Payload{ - BlockRlp: MockBlockRlp2, - StateObjectRlp: MockStateDiff2Bytes, - } - - CreatedExpectedStorageDiff = utils.StorageDiffInput{ - HashedAddress: common.BytesToHash(ContractLeafKey[:]), - BlockHash: common.HexToHash(BlockHash), - BlockHeight: int(BlockNumber.Int64()), - StorageKey: common.BytesToHash(StorageKey), - StorageValue: common.BytesToHash(SmallStorageValue), - } - UpdatedExpectedStorageDiff = utils.StorageDiffInput{ - HashedAddress: common.BytesToHash(AnotherContractLeafKey[:]), - BlockHash: common.HexToHash(BlockHash), - BlockHeight: 
int(BlockNumber.Int64()), - StorageKey: common.BytesToHash(StorageKey), - StorageValue: common.BytesToHash(LargeStorageValue), - } - UpdatedExpectedStorageDiff2 = utils.StorageDiffInput{ - HashedAddress: common.BytesToHash(AnotherContractLeafKey[:]), - BlockHash: common.HexToHash(BlockHash2), - BlockHeight: int(BlockNumber2.Int64()), - StorageKey: common.BytesToHash(StorageKey), - StorageValue: common.BytesToHash(SmallStorageValue), - } - DeletedExpectedStorageDiff = utils.StorageDiffInput{ - HashedAddress: common.BytesToHash(AnotherContractLeafKey[:]), - BlockHash: common.HexToHash(BlockHash), - BlockHeight: int(BlockNumber.Int64()), - StorageKey: common.BytesToHash(StorageKey), - StorageValue: common.BytesToHash(SmallStorageValue), - } -) diff --git a/libraries/shared/test_data/test_helpers.go b/libraries/shared/test_data/test_helpers.go deleted file mode 100644 index 2edb66f8..00000000 --- a/libraries/shared/test_data/test_helpers.go +++ /dev/null @@ -1,38 +0,0 @@ -package test_data - -import ( - "math/rand" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -// Create a header sync log to reference in an event, returning inserted header sync log -func CreateTestLog(headerID int64, db *postgres.DB) core.HeaderSyncLog { - log := types.Log{ - Address: common.Address{}, - Topics: nil, - Data: nil, - BlockNumber: 0, - TxHash: common.Hash{}, - TxIndex: uint(rand.Int31()), - BlockHash: common.Hash{}, - Index: 0, - Removed: false, - } - headerSyncLogRepository := repositories.NewHeaderSyncLogRepository(db) - insertLogsErr := headerSyncLogRepository.CreateHeaderSyncLogs(headerID, []types.Log{log}) - Expect(insertLogsErr).NotTo(HaveOccurred()) - headerSyncLogs, getLogsErr := headerSyncLogRepository.GetUntransformedHeaderSyncLogs() - Expect(getLogsErr).NotTo(HaveOccurred()) - for _, headerSyncLog := range headerSyncLogs { - if headerSyncLog.Log.TxIndex == log.TxIndex { - return headerSyncLog - } - } - panic("couldn't find inserted test log") -} diff --git a/libraries/shared/transactions/syncer.go b/libraries/shared/transactions/syncer.go deleted file mode 100644 index 7717331b..00000000 --- a/libraries/shared/transactions/syncer.go +++ /dev/null @@ -1,72 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package transactions - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type ITransactionsSyncer interface { - SyncTransactions(headerID int64, logs []types.Log) error -} - -type TransactionsSyncer struct { - BlockChain core.BlockChain - Repository datastore.HeaderRepository -} - -func NewTransactionsSyncer(db *postgres.DB, blockChain core.BlockChain) TransactionsSyncer { - repository := repositories.NewHeaderRepository(db) - return TransactionsSyncer{ - BlockChain: blockChain, - Repository: repository, - } -} - -func (syncer TransactionsSyncer) SyncTransactions(headerID int64, logs []types.Log) error { - transactionHashes := getUniqueTransactionHashes(logs) - if len(transactionHashes) < 1 { - return nil - } - transactions, transactionErr := syncer.BlockChain.GetTransactions(transactionHashes) - if transactionErr != nil { - return transactionErr - } - writeErr := syncer.Repository.CreateTransactions(headerID, transactions) - if writeErr != nil { - return writeErr - } - return nil -} - -func getUniqueTransactionHashes(logs []types.Log) []common.Hash { - seen := make(map[common.Hash]struct{}, len(logs)) - var result []common.Hash - for _, log := range logs { - if _, ok := seen[log.TxHash]; ok { - continue - } - seen[log.TxHash] = struct{}{} - result = append(result, log.TxHash) - } - return result -} diff --git a/libraries/shared/transactions/syncer_test.go b/libraries/shared/transactions/syncer_test.go deleted file mode 100644 index e0f79275..00000000 --- a/libraries/shared/transactions/syncer_test.go +++ /dev/null @@ -1,98 +0,0 @@ -// VulcanizeDB -// 
Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package transactions_test - -import ( - "github.com/ethereum/go-ethereum/core/types" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/transactions" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" - "github.com/vulcanize/vulcanizedb/test_config" -) - -var _ = Describe("Transaction syncer", func() { - var ( - blockChain *fakes.MockBlockChain - syncer transactions.TransactionsSyncer - ) - - BeforeEach(func() { - db := test_config.NewTestDB(test_config.NewTestNode()) - test_config.CleanTestDB(db) - blockChain = fakes.NewMockBlockChain() - syncer = transactions.NewTransactionsSyncer(db, blockChain) - }) - - It("fetches transactions for logs", func() { - err := syncer.SyncTransactions(0, []types.Log{{TxHash: fakes.FakeHash}}) - - Expect(err).NotTo(HaveOccurred()) - Expect(blockChain.GetTransactionsCalled).To(BeTrue()) - }) - - It("does not fetch transactions if no logs", func() { - err := syncer.SyncTransactions(0, []types.Log{}) - - Expect(err).NotTo(HaveOccurred()) - Expect(blockChain.GetTransactionsCalled).To(BeFalse()) - }) - - It("only fetches transactions with unique hashes", func() { - err := syncer.SyncTransactions(0, []types.Log{{ - TxHash: fakes.FakeHash, - }, { - TxHash: 
fakes.FakeHash, - }}) - - Expect(err).NotTo(HaveOccurred()) - Expect(len(blockChain.GetTransactionsPassedHashes)).To(Equal(1)) - }) - - It("returns error if fetching transactions fails", func() { - blockChain.GetTransactionsError = fakes.FakeError - - err := syncer.SyncTransactions(0, []types.Log{{TxHash: fakes.FakeHash}}) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - - It("passes transactions to repository for persistence", func() { - blockChain.Transactions = []core.TransactionModel{{}} - mockHeaderRepository := fakes.NewMockHeaderRepository() - syncer.Repository = mockHeaderRepository - - err := syncer.SyncTransactions(0, []types.Log{{TxHash: fakes.FakeHash}}) - - Expect(err).NotTo(HaveOccurred()) - Expect(mockHeaderRepository.CreateTransactionsCalled).To(BeTrue()) - }) - - It("returns error if persisting transactions fails", func() { - blockChain.Transactions = []core.TransactionModel{{}} - mockHeaderRepository := fakes.NewMockHeaderRepository() - mockHeaderRepository.CreateTransactionsError = fakes.FakeError - syncer.Repository = mockHeaderRepository - - err := syncer.SyncTransactions(0, []types.Log{{TxHash: fakes.FakeHash}}) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) -}) diff --git a/libraries/shared/transactions/transactions_suite_test.go b/libraries/shared/transactions/transactions_suite_test.go deleted file mode 100644 index ca012b1a..00000000 --- a/libraries/shared/transactions/transactions_suite_test.go +++ /dev/null @@ -1,29 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package transactions_test - -import ( - "testing" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" -) - -func TestTransactions(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Shared Transactions Suite") -} diff --git a/libraries/shared/transformer/contract_transformer.go b/libraries/shared/transformer/contract_transformer.go deleted file mode 100644 index 98547c88..00000000 --- a/libraries/shared/transformer/contract_transformer.go +++ /dev/null @@ -1,31 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package transformer - -import ( - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type ContractTransformer interface { - Init() error - Execute() error - GetConfig() config.ContractConfig -} - -type ContractTransformerInitializer func(db *postgres.DB, bc core.BlockChain) ContractTransformer diff --git a/libraries/shared/transformer/event_transformer.go b/libraries/shared/transformer/event_transformer.go deleted file mode 100644 index a724340c..00000000 --- a/libraries/shared/transformer/event_transformer.go +++ /dev/null @@ -1,56 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package transformer - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type EventTransformer interface { - Execute(logs []core.HeaderSyncLog) error - GetConfig() EventTransformerConfig -} - -type EventTransformerInitializer func(db *postgres.DB) EventTransformer - -type EventTransformerConfig struct { - TransformerName string - ContractAddresses []string - ContractAbi string - Topic string - StartingBlockNumber int64 - EndingBlockNumber int64 // Set -1 for indefinite transformer -} - -func HexToInt64(byteString string) int64 { - value := common.HexToHash(byteString) - return value.Big().Int64() -} - -func HexToString(byteString string) string { - value := common.HexToHash(byteString) - return value.Big().String() -} - -func HexStringsToAddresses(strings []string) (addresses []common.Address) { - for _, hexString := range strings { - addresses = append(addresses, common.HexToAddress(hexString)) - } - return -} diff --git a/libraries/shared/transformer/storage_transformer.go b/libraries/shared/transformer/storage_transformer.go deleted file mode 100644 index f2b91ac5..00000000 --- a/libraries/shared/transformer/storage_transformer.go +++ /dev/null @@ -1,30 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package transformer - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type StorageTransformer interface { - Execute(diff utils.PersistedStorageDiff) error - KeccakContractAddress() common.Hash -} - -type StorageTransformerInitializer func(db *postgres.DB) StorageTransformer diff --git a/libraries/shared/transformer/super_node_transformer.go b/libraries/shared/transformer/super_node_transformer.go deleted file mode 100644 index bda8b6a1..00000000 --- a/libraries/shared/transformer/super_node_transformer.go +++ /dev/null @@ -1,31 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package transformer - -import ( - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" -) - -type SuperNodeTransformer interface { - Init() error - Execute() error - GetConfig() shared.SubscriptionSettings -} - -type SuperNodeTransformerInitializer func(db *postgres.DB, subCon shared.SubscriptionSettings, client core.RPCClient) SuperNodeTransformer diff --git a/libraries/shared/utilities/utils.go b/libraries/shared/utilities/utils.go deleted file mode 100644 index 3f586213..00000000 --- a/libraries/shared/utilities/utils.go +++ /dev/null @@ -1,77 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package utilities - -import ( - "errors" - - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" -) - -// GetBlockHeightBins splits a block range up into bins of block heights of the given batch size -func GetBlockHeightBins(startingBlock, endingBlock, batchSize uint64) ([][]uint64, error) { - if endingBlock < startingBlock { - return nil, errors.New("backfill: ending block number needs to be greater than starting block number") - } - if batchSize == 0 { - return nil, errors.New("backfill: batchsize needs to be greater than zero") - } - length := endingBlock - startingBlock + 1 - numberOfBins := length / batchSize - remainder := length % batchSize - if remainder != 0 { - numberOfBins++ - } - blockRangeBins := make([][]uint64, numberOfBins) - for i := range blockRangeBins { - nextBinStart := startingBlock + batchSize - blockRange := make([]uint64, 0, nextBinStart-startingBlock+1) - for j := startingBlock; j < nextBinStart && j <= endingBlock; j++ { - blockRange = append(blockRange, j) - } - startingBlock = nextBinStart - blockRangeBins[i] = blockRange - } - return blockRangeBins, nil -} - -// MissingHeightsToGaps returns a slice of gaps from a slice of missing block heights -func MissingHeightsToGaps(heights []uint64) []shared.Gap { - if len(heights) == 0 { - return nil - } - validationGaps := make([]shared.Gap, 0) - start := heights[0] - lastHeight := start - for i, height := range heights[1:] { - if height != lastHeight+1 { - validationGaps = append(validationGaps, shared.Gap{ - Start: start, - Stop: lastHeight, - }) - start = height - } - if i+2 == len(heights) { - validationGaps = append(validationGaps, shared.Gap{ - Start: start, - Stop: height, - }) - } - lastHeight = height - } - return validationGaps -} diff --git a/libraries/shared/watcher/contract_watcher.go b/libraries/shared/watcher/contract_watcher.go deleted file mode 100644 index 607dad40..00000000 --- a/libraries/shared/watcher/contract_watcher.go +++ /dev/null @@ -1,76 +0,0 @@ -// 
VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -// Contract watcher is built with a more generic interface -// that allows offloading more of the operational logic to -// the transformers, allowing them to act more dynamically -// Built to work primarily with the contract_watcher packaging -package watcher - -import ( - "fmt" - - "github.com/sirupsen/logrus" - - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type ContractWatcher struct { - Transformers []transformer.ContractTransformer - DB *postgres.DB - BlockChain core.BlockChain -} - -func NewContractWatcher(db *postgres.DB, bc core.BlockChain) ContractWatcher { - return ContractWatcher{ - DB: db, - BlockChain: bc, - } -} - -func (watcher *ContractWatcher) AddTransformers(inits interface{}) error { - initializers, ok := inits.([]transformer.ContractTransformerInitializer) - if !ok { - return fmt.Errorf("initializers of type %T, not %T", inits, []transformer.ContractTransformerInitializer{}) - } - - for _, initializer := range initializers { - t := initializer(watcher.DB, watcher.BlockChain) - watcher.Transformers = append(watcher.Transformers, t) - } - - for _, contractTransformer := range watcher.Transformers { - err := contractTransformer.Init() - if err != 
nil { - logrus.Print("Unable to initialize transformer:", contractTransformer.GetConfig().Name, err) - return err - } - } - return nil -} - -func (watcher *ContractWatcher) Execute() error { - for _, contractTransformer := range watcher.Transformers { - err := contractTransformer.Execute() - if err != nil { - logrus.Error("Unable to execute transformer:", contractTransformer.GetConfig().Name, err) - return err - } - } - return nil -} diff --git a/libraries/shared/watcher/event_watcher.go b/libraries/shared/watcher/event_watcher.go deleted file mode 100644 index 57fc12b4..00000000 --- a/libraries/shared/watcher/event_watcher.go +++ /dev/null @@ -1,127 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package watcher - -import ( - "time" - - "github.com/sirupsen/logrus" - - "github.com/vulcanize/vulcanizedb/libraries/shared/chunker" - "github.com/vulcanize/vulcanizedb/libraries/shared/constants" - "github.com/vulcanize/vulcanizedb/libraries/shared/fetcher" - "github.com/vulcanize/vulcanizedb/libraries/shared/logs" - "github.com/vulcanize/vulcanizedb/libraries/shared/transactions" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -const NoNewDataPause = time.Second * 7 - -type EventWatcher struct { - blockChain core.BlockChain - db *postgres.DB - LogDelegator logs.ILogDelegator - LogExtractor logs.ILogExtractor -} - -func NewEventWatcher(db *postgres.DB, bc core.BlockChain) EventWatcher { - extractor := &logs.LogExtractor{ - CheckedHeadersRepository: repositories.NewCheckedHeadersRepository(db), - Fetcher: fetcher.NewLogFetcher(bc), - LogRepository: repositories.NewHeaderSyncLogRepository(db), - Syncer: transactions.NewTransactionsSyncer(db, bc), - } - logTransformer := &logs.LogDelegator{ - Chunker: chunker.NewLogChunker(), - LogRepository: repositories.NewHeaderSyncLogRepository(db), - } - return EventWatcher{ - blockChain: bc, - db: db, - LogExtractor: extractor, - LogDelegator: logTransformer, - } -} - -// Adds transformers to the watcher so that their logs will be extracted and delegated. -func (watcher *EventWatcher) AddTransformers(initializers []transformer.EventTransformerInitializer) error { - for _, initializer := range initializers { - t := initializer(watcher.db) - - watcher.LogDelegator.AddTransformer(t) - err := watcher.LogExtractor.AddTransformerConfig(t.GetConfig()) - if err != nil { - return err - } - } - return nil -} - -// Extracts and delegates watched log events. 
-func (watcher *EventWatcher) Execute(recheckHeaders constants.TransformerExecution) error { - delegateErrsChan := make(chan error) - extractErrsChan := make(chan error) - defer close(delegateErrsChan) - defer close(extractErrsChan) - - go watcher.extractLogs(recheckHeaders, extractErrsChan) - go watcher.delegateLogs(delegateErrsChan) - - for { - select { - case delegateErr := <-delegateErrsChan: - logrus.Errorf("error delegating logs in event watcher: %s", delegateErr.Error()) - return delegateErr - case extractErr := <-extractErrsChan: - logrus.Errorf("error extracting logs in event watcher: %s", extractErr.Error()) - return extractErr - } - } -} - -func (watcher *EventWatcher) extractLogs(recheckHeaders constants.TransformerExecution, errs chan error) { - err := watcher.LogExtractor.ExtractLogs(recheckHeaders) - if err != nil && err != logs.ErrNoUncheckedHeaders { - errs <- err - return - } - - if err == logs.ErrNoUncheckedHeaders { - time.Sleep(NoNewDataPause) - watcher.extractLogs(recheckHeaders, errs) - } else { - watcher.extractLogs(recheckHeaders, errs) - } -} - -func (watcher *EventWatcher) delegateLogs(errs chan error) { - err := watcher.LogDelegator.DelegateLogs() - if err != nil && err != logs.ErrNoLogs { - errs <- err - return - } - - if err == logs.ErrNoLogs { - time.Sleep(NoNewDataPause) - watcher.delegateLogs(errs) - } else { - watcher.delegateLogs(errs) - } -} diff --git a/libraries/shared/watcher/event_watcher_test.go b/libraries/shared/watcher/event_watcher_test.go deleted file mode 100644 index ba2a1ff3..00000000 --- a/libraries/shared/watcher/event_watcher_test.go +++ /dev/null @@ -1,179 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package watcher_test - -import ( - "errors" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/constants" - "github.com/vulcanize/vulcanizedb/libraries/shared/logs" - "github.com/vulcanize/vulcanizedb/libraries/shared/mocks" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/libraries/shared/watcher" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" -) - -var errExecuteClosed = errors.New("this error means the mocks were finished executing") - -var _ = Describe("Event Watcher", func() { - var ( - delegator *mocks.MockLogDelegator - extractor *mocks.MockLogExtractor - eventWatcher *watcher.EventWatcher - ) - - BeforeEach(func() { - delegator = &mocks.MockLogDelegator{} - extractor = &mocks.MockLogExtractor{} - eventWatcher = &watcher.EventWatcher{ - LogDelegator: delegator, - LogExtractor: extractor, - } - }) - - Describe("AddTransformers", func() { - var ( - fakeTransformerOne, fakeTransformerTwo *mocks.MockEventTransformer - ) - - BeforeEach(func() { - fakeTransformerOne = &mocks.MockEventTransformer{} - fakeTransformerOne.SetTransformerConfig(mocks.FakeTransformerConfig) - fakeTransformerTwo = &mocks.MockEventTransformer{} - fakeTransformerTwo.SetTransformerConfig(mocks.FakeTransformerConfig) - initializers := []transformer.EventTransformerInitializer{ - fakeTransformerOne.FakeTransformerInitializer, - fakeTransformerTwo.FakeTransformerInitializer, - } - - err := eventWatcher.AddTransformers(initializers) - Expect(err).NotTo(HaveOccurred()) - }) - - It("adds initialized 
transformer to log delegator", func() { - expectedTransformers := []transformer.EventTransformer{ - fakeTransformerOne, - fakeTransformerTwo, - } - Expect(delegator.AddedTransformers).To(Equal(expectedTransformers)) - }) - - It("adds transformer config to log extractor", func() { - expectedConfigs := []transformer.EventTransformerConfig{ - mocks.FakeTransformerConfig, - mocks.FakeTransformerConfig, - } - Expect(extractor.AddedConfigs).To(Equal(expectedConfigs)) - }) - }) - - Describe("Execute", func() { - - It("extracts watched logs", func(done Done) { - delegator.DelegateErrors = []error{logs.ErrNoLogs} - extractor.ExtractLogsErrors = []error{nil, errExecuteClosed} - - err := eventWatcher.Execute(constants.HeaderUnchecked) - - Expect(err).To(MatchError(errExecuteClosed)) - Eventually(func() bool { - return extractor.ExtractLogsCount > 0 - }).Should(BeTrue()) - close(done) - }) - - It("returns error if extracting logs fails", func(done Done) { - delegator.DelegateErrors = []error{logs.ErrNoLogs} - extractor.ExtractLogsErrors = []error{fakes.FakeError} - - err := eventWatcher.Execute(constants.HeaderUnchecked) - - Expect(err).To(MatchError(fakes.FakeError)) - close(done) - }) - - It("extracts watched logs again if missing headers found", func(done Done) { - delegator.DelegateErrors = []error{logs.ErrNoLogs} - extractor.ExtractLogsErrors = []error{nil, errExecuteClosed} - - err := eventWatcher.Execute(constants.HeaderUnchecked) - - Expect(err).To(MatchError(errExecuteClosed)) - Eventually(func() bool { - return extractor.ExtractLogsCount > 1 - }).Should(BeTrue()) - close(done) - }) - - It("returns error if extracting logs fails on subsequent run", func(done Done) { - delegator.DelegateErrors = []error{logs.ErrNoLogs} - extractor.ExtractLogsErrors = []error{nil, fakes.FakeError} - - err := eventWatcher.Execute(constants.HeaderUnchecked) - - Expect(err).To(MatchError(fakes.FakeError)) - close(done) - }) - - It("delegates untransformed logs", func() { - 
delegator.DelegateErrors = []error{nil, errExecuteClosed} - extractor.ExtractLogsErrors = []error{logs.ErrNoUncheckedHeaders} - - err := eventWatcher.Execute(constants.HeaderUnchecked) - - Expect(err).To(MatchError(errExecuteClosed)) - Eventually(func() bool { - return delegator.DelegateCallCount > 0 - }).Should(BeTrue()) - }) - - It("returns error if delegating logs fails", func(done Done) { - delegator.DelegateErrors = []error{fakes.FakeError} - extractor.ExtractLogsErrors = []error{logs.ErrNoUncheckedHeaders} - - err := eventWatcher.Execute(constants.HeaderUnchecked) - - Expect(err).To(MatchError(fakes.FakeError)) - close(done) - }) - - It("delegates logs again if untransformed logs found", func(done Done) { - delegator.DelegateErrors = []error{nil, nil, nil, errExecuteClosed} - extractor.ExtractLogsErrors = []error{logs.ErrNoUncheckedHeaders} - - err := eventWatcher.Execute(constants.HeaderUnchecked) - - Expect(err).To(MatchError(errExecuteClosed)) - Eventually(func() bool { - return delegator.DelegateCallCount > 1 - }).Should(BeTrue()) - close(done) - }) - - It("returns error if delegating logs fails on subsequent run", func(done Done) { - delegator.DelegateErrors = []error{nil, fakes.FakeError} - extractor.ExtractLogsErrors = []error{logs.ErrNoUncheckedHeaders} - - err := eventWatcher.Execute(constants.HeaderUnchecked) - - Expect(err).To(MatchError(fakes.FakeError)) - close(done) - }) - }) -}) diff --git a/libraries/shared/watcher/storage_watcher.go b/libraries/shared/watcher/storage_watcher.go deleted file mode 100644 index b7fc0109..00000000 --- a/libraries/shared/watcher/storage_watcher.go +++ /dev/null @@ -1,168 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package watcher - -import ( - "fmt" - "time" - - "github.com/ethereum/go-ethereum/common" - "github.com/sirupsen/logrus" - - "github.com/vulcanize/vulcanizedb/libraries/shared/fetcher" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -type IStorageWatcher interface { - AddTransformers(initializers []transformer.StorageTransformerInitializer) - Execute(queueRecheckInterval time.Duration, backFillOn bool) - BackFill(startingBlock uint64, backFiller storage.BackFiller) -} - -type StorageWatcher struct { - db *postgres.DB - StorageFetcher fetcher.IStorageFetcher - Queue storage.IStorageQueue - StorageDiffRepository datastore.StorageDiffRepository - KeccakAddressTransformers map[common.Hash]transformer.StorageTransformer // keccak hash of an address => transformer - DiffsChan chan utils.StorageDiffInput - ErrsChan chan error - BackFillDoneChan chan bool - StartingSyncBlockChan chan uint64 -} - -func NewStorageWatcher(f fetcher.IStorageFetcher, db *postgres.DB) *StorageWatcher { - queue := storage.NewStorageQueue(db) - storageDiffRepository := repositories.NewStorageDiffRepository(db) - transformers := make(map[common.Hash]transformer.StorageTransformer) - return &StorageWatcher{ - db: db, - StorageFetcher: f, - DiffsChan: make(chan utils.StorageDiffInput, 
fetcher.PayloadChanBufferSize), - ErrsChan: make(chan error), - StartingSyncBlockChan: make(chan uint64), - BackFillDoneChan: make(chan bool), - Queue: queue, - StorageDiffRepository: storageDiffRepository, - KeccakAddressTransformers: transformers, - } -} - -func (storageWatcher *StorageWatcher) AddTransformers(initializers []transformer.StorageTransformerInitializer) { - for _, initializer := range initializers { - storageTransformer := initializer(storageWatcher.db) - storageWatcher.KeccakAddressTransformers[storageTransformer.KeccakContractAddress()] = storageTransformer - } -} - -// BackFill uses a backFiller to backfill missing storage diffs for the storageWatcher -func (storageWatcher *StorageWatcher) BackFill(startingBlock uint64, backFiller storage.BackFiller) { - // this blocks until the Execute process sends us the first block number it sees - endBackFillBlock := <-storageWatcher.StartingSyncBlockChan - backFillInitErr := backFiller.BackFill(startingBlock, endBackFillBlock, - storageWatcher.DiffsChan, storageWatcher.ErrsChan, storageWatcher.BackFillDoneChan) - if backFillInitErr != nil { - logrus.Warn(backFillInitErr) - } -} - -// Execute runs the StorageWatcher processes -func (storageWatcher *StorageWatcher) Execute(queueRecheckInterval time.Duration, backFillOn bool) { - ticker := time.NewTicker(queueRecheckInterval) - go storageWatcher.StorageFetcher.FetchStorageDiffs(storageWatcher.DiffsChan, storageWatcher.ErrsChan) - start := true - for { - select { - case err := <-storageWatcher.ErrsChan: - logrus.Warn(fmt.Sprintf("error fetching storage diffs: %s", err.Error())) - case diff := <-storageWatcher.DiffsChan: - if start && backFillOn { - storageWatcher.StartingSyncBlockChan <- uint64(diff.BlockHeight - 1) - start = false - } - storageWatcher.processRow(diff) - case <-ticker.C: - storageWatcher.processQueue() - case <-storageWatcher.BackFillDoneChan: - logrus.Info("storage watcher backfill process has finished") - } - } -} - -func (storageWatcher 
*StorageWatcher) getTransformer(diff utils.PersistedStorageDiff) (transformer.StorageTransformer, bool) { - storageTransformer, ok := storageWatcher.KeccakAddressTransformers[diff.HashedAddress] - return storageTransformer, ok -} - -func (storageWatcher StorageWatcher) processRow(diffInput utils.StorageDiffInput) { - diffID, err := storageWatcher.StorageDiffRepository.CreateStorageDiff(diffInput) - if err != nil { - if err == repositories.ErrDuplicateDiff { - logrus.Warn("ignoring duplicate diff") - return - } - logrus.Warnf("failed to persist storage diff: %s", err.Error()) - // TODO: bail? Should we continue attempting to transform a diff we didn't persist - } - persistedDiff := utils.ToPersistedDiff(diffInput, diffID) - storageTransformer, ok := storageWatcher.getTransformer(persistedDiff) - if !ok { - logrus.Debug("ignoring diff from unwatched contract") - return - } - executeErr := storageTransformer.Execute(persistedDiff) - if executeErr != nil { - logrus.Warn(fmt.Sprintf("error executing storage transformer: %s", executeErr)) - queueErr := storageWatcher.Queue.Add(persistedDiff) - if queueErr != nil { - logrus.Warn(fmt.Sprintf("error queueing storage diff: %s", queueErr)) - } - return - } - logrus.Debugf("Storage diff persisted at block height: %d", diffInput.BlockHeight) -} - -func (storageWatcher StorageWatcher) processQueue() { - diffs, fetchErr := storageWatcher.Queue.GetAll() - if fetchErr != nil { - logrus.Warn(fmt.Sprintf("error getting queued storage: %s", fetchErr)) - } - for _, diff := range diffs { - storageTransformer, ok := storageWatcher.getTransformer(diff) - if !ok { - // delete diff from queue if address no longer watched - storageWatcher.deleteRow(diff.ID) - continue - } - executeErr := storageTransformer.Execute(diff) - if executeErr == nil { - storageWatcher.deleteRow(diff.ID) - } - } -} - -func (storageWatcher StorageWatcher) deleteRow(diffID int64) { - deleteErr := storageWatcher.Queue.Delete(diffID) - if deleteErr != nil { - 
logrus.Warn(fmt.Sprintf("error deleting persisted diff from queue: %s", deleteErr)) - } -} diff --git a/libraries/shared/watcher/storage_watcher_test.go b/libraries/shared/watcher/storage_watcher_test.go deleted file mode 100644 index c6bc4625..00000000 --- a/libraries/shared/watcher/storage_watcher_test.go +++ /dev/null @@ -1,530 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package watcher_test - -import ( - "errors" - "io/ioutil" - "math/rand" - "os" - "time" - - "github.com/ethereum/go-ethereum/common" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - "github.com/sirupsen/logrus" - - "github.com/vulcanize/vulcanizedb/libraries/shared/mocks" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/libraries/shared/test_data" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/libraries/shared/watcher" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" - "github.com/vulcanize/vulcanizedb/test_config" -) - -var _ = Describe("Storage Watcher", func() { - var ( - mockFetcher *mocks.StorageFetcher - mockQueue *mocks.MockStorageQueue - mockTransformer *mocks.MockStorageTransformer - storageWatcher *watcher.StorageWatcher - mockStorageDiffRepository *fakes.MockStorageDiffRepository - fakeDiffId = rand.Int63() - hashedAddress = utils.HexToKeccak256Hash("0x0123456789abcdef") - csvDiff utils.StorageDiffInput - ) - Describe("AddTransformer", func() { - It("adds transformers", func() { - fakeHashedAddress := utils.HexToKeccak256Hash("0x12345") - fakeTransformer := &mocks.MockStorageTransformer{KeccakOfAddress: fakeHashedAddress} - w := watcher.NewStorageWatcher(mocks.NewStorageFetcher(), test_config.NewTestDB(test_config.NewTestNode())) - - w.AddTransformers([]transformer.StorageTransformerInitializer{fakeTransformer.FakeTransformerInitializer}) - - Expect(w.KeccakAddressTransformers[fakeHashedAddress]).To(Equal(fakeTransformer)) - }) - }) - Describe("Execute", func() { - BeforeEach(func() { - mockFetcher = mocks.NewStorageFetcher() - mockQueue = &mocks.MockStorageQueue{} - mockStorageDiffRepository = &fakes.MockStorageDiffRepository{} - mockTransformer = &mocks.MockStorageTransformer{KeccakOfAddress: hashedAddress} - csvDiff = utils.StorageDiffInput{ - HashedAddress: hashedAddress, - BlockHash: common.HexToHash("0xfedcba9876543210"), - BlockHeight: 0, - StorageKey: common.HexToHash("0xabcdef1234567890"), - StorageValue: 
common.HexToHash("0x9876543210abcdef"), - } - }) - - It("logs error if fetching storage diffs fails", func(done Done) { - mockFetcher.ErrsToReturn = []error{fakes.FakeError} - storageWatcher = watcher.NewStorageWatcher(mockFetcher, test_config.NewTestDB(test_config.NewTestNode())) - storageWatcher.Queue = mockQueue - storageWatcher.AddTransformers([]transformer.StorageTransformerInitializer{mockTransformer.FakeTransformerInitializer}) - storageWatcher.StorageDiffRepository = mockStorageDiffRepository - tempFile, fileErr := ioutil.TempFile("", "log") - Expect(fileErr).NotTo(HaveOccurred()) - defer os.Remove(tempFile.Name()) - logrus.SetOutput(tempFile) - - go storageWatcher.Execute(time.Hour, false) - - Eventually(func() (string, error) { - logContent, err := ioutil.ReadFile(tempFile.Name()) - return string(logContent), err - }).Should(ContainSubstring(fakes.FakeError.Error())) - close(done) - }) - - Describe("transforming new storage diffs from csv", func() { - var fakePersistedDiff utils.PersistedStorageDiff - BeforeEach(func() { - mockFetcher.DiffsToReturn = []utils.StorageDiffInput{csvDiff} - storageWatcher = watcher.NewStorageWatcher(mockFetcher, test_config.NewTestDB(test_config.NewTestNode())) - storageWatcher.Queue = mockQueue - storageWatcher.AddTransformers([]transformer.StorageTransformerInitializer{mockTransformer.FakeTransformerInitializer}) - fakePersistedDiff = utils.PersistedStorageDiff{ - ID: fakeDiffId, - StorageDiffInput: utils.StorageDiffInput{ - HashedAddress: csvDiff.HashedAddress, - BlockHash: csvDiff.BlockHash, - BlockHeight: csvDiff.BlockHeight, - StorageValue: csvDiff.StorageValue, - StorageKey: csvDiff.StorageKey, - }, - } - mockStorageDiffRepository.CreateReturnID = fakeDiffId - storageWatcher.StorageDiffRepository = mockStorageDiffRepository - }) - - It("writes raw diff before processing", func(done Done) { - go storageWatcher.Execute(time.Hour, false) - - Eventually(func() []utils.StorageDiffInput { - return 
mockStorageDiffRepository.CreatePassedInputs - }).Should(ContainElement(csvDiff)) - close(done) - }) - - It("discards raw diff if it's already been persisted", func(done Done) { - mockStorageDiffRepository.CreateReturnError = repositories.ErrDuplicateDiff - - go storageWatcher.Execute(time.Hour, false) - - Consistently(func() []utils.PersistedStorageDiff { - return mockTransformer.PassedDiffs - }).Should(BeZero()) - close(done) - }) - - It("logs error if persisting raw diff fails", func(done Done) { - mockStorageDiffRepository.CreateReturnError = fakes.FakeError - tempFile, fileErr := ioutil.TempFile("", "log") - Expect(fileErr).NotTo(HaveOccurred()) - defer os.Remove(tempFile.Name()) - logrus.SetOutput(tempFile) - - go storageWatcher.Execute(time.Hour, false) - - Eventually(func() (string, error) { - logContent, err := ioutil.ReadFile(tempFile.Name()) - return string(logContent), err - }).Should(ContainSubstring(fakes.FakeError.Error())) - close(done) - }) - - It("executes transformer for recognized storage diff", func(done Done) { - go storageWatcher.Execute(time.Hour, false) - - Eventually(func() []utils.PersistedStorageDiff { - return mockTransformer.PassedDiffs - }).Should(Equal([]utils.PersistedStorageDiff{ - fakePersistedDiff, - })) - close(done) - }) - - It("queues diff for later processing if transformer execution fails", func(done Done) { - mockTransformer.ExecuteErr = fakes.FakeError - - go storageWatcher.Execute(time.Hour, false) - - Eventually(func() bool { - return mockQueue.AddCalled - }).Should(BeTrue()) - Eventually(func() utils.PersistedStorageDiff { - if len(mockQueue.AddPassedDiffs) > 0 { - return mockQueue.AddPassedDiffs[0] - } - return utils.PersistedStorageDiff{} - }).Should(Equal(fakePersistedDiff)) - close(done) - }) - - It("logs error if queueing diff fails", func(done Done) { - mockTransformer.ExecuteErr = utils.ErrStorageKeyNotFound{} - mockQueue.AddError = fakes.FakeError - tempFile, fileErr := ioutil.TempFile("", "log") - 
Expect(fileErr).NotTo(HaveOccurred()) - defer os.Remove(tempFile.Name()) - logrus.SetOutput(tempFile) - - go storageWatcher.Execute(time.Hour, false) - - Eventually(func() bool { - return mockQueue.AddCalled - }).Should(BeTrue()) - Eventually(func() (string, error) { - logContent, err := ioutil.ReadFile(tempFile.Name()) - return string(logContent), err - }).Should(ContainSubstring(fakes.FakeError.Error())) - close(done) - }) - }) - - Describe("transforming queued storage diffs", func() { - var queuedDiff utils.PersistedStorageDiff - BeforeEach(func() { - queuedDiff = utils.PersistedStorageDiff{ - ID: 1337, - StorageDiffInput: utils.StorageDiffInput{ - HashedAddress: hashedAddress, - BlockHash: test_data.FakeHash(), - BlockHeight: rand.Int(), - StorageKey: test_data.FakeHash(), - StorageValue: test_data.FakeHash(), - }, - } - mockQueue.DiffsToReturn = []utils.PersistedStorageDiff{queuedDiff} - storageWatcher = watcher.NewStorageWatcher(mockFetcher, test_config.NewTestDB(test_config.NewTestNode())) - storageWatcher.Queue = mockQueue - storageWatcher.AddTransformers([]transformer.StorageTransformerInitializer{mockTransformer.FakeTransformerInitializer}) - }) - - It("executes transformer for storage diff", func(done Done) { - go storageWatcher.Execute(time.Nanosecond, false) - - Eventually(func() utils.PersistedStorageDiff { - if len(mockTransformer.PassedDiffs) > 0 { - return mockTransformer.PassedDiffs[0] - } - return utils.PersistedStorageDiff{} - }).Should(Equal(queuedDiff)) - close(done) - }) - - It("deletes diff from queue if transformer execution successful", func(done Done) { - go storageWatcher.Execute(time.Nanosecond, false) - - Eventually(func() int64 { - if len(mockQueue.DeletePassedIds) > 0 { - return mockQueue.DeletePassedIds[0] - } - return 0 - }).Should(Equal(queuedDiff.ID)) - close(done) - }) - - It("logs error if deleting persisted diff fails", func(done Done) { - mockQueue.DeleteErr = fakes.FakeError - tempFile, fileErr := ioutil.TempFile("", "log") 
- Expect(fileErr).NotTo(HaveOccurred()) - defer os.Remove(tempFile.Name()) - logrus.SetOutput(tempFile) - - go storageWatcher.Execute(time.Nanosecond, false) - - Eventually(func() (string, error) { - logContent, err := ioutil.ReadFile(tempFile.Name()) - return string(logContent), err - }).Should(ContainSubstring(fakes.FakeError.Error())) - close(done) - }) - - It("deletes obsolete diff from queue if contract not recognized", func(done Done) { - obsoleteDiff := utils.PersistedStorageDiff{ - ID: queuedDiff.ID + 1, - StorageDiffInput: utils.StorageDiffInput{HashedAddress: test_data.FakeHash()}, - } - mockQueue.DiffsToReturn = []utils.PersistedStorageDiff{obsoleteDiff} - - go storageWatcher.Execute(time.Nanosecond, false) - - Eventually(func() int64 { - if len(mockQueue.DeletePassedIds) > 0 { - return mockQueue.DeletePassedIds[0] - } - return 0 - }).Should(Equal(obsoleteDiff.ID)) - close(done) - }) - - It("logs error if deleting obsolete diff fails", func(done Done) { - obsoleteDiff := utils.PersistedStorageDiff{ - ID: queuedDiff.ID + 1, - StorageDiffInput: utils.StorageDiffInput{HashedAddress: test_data.FakeHash()}, - } - mockQueue.DiffsToReturn = []utils.PersistedStorageDiff{obsoleteDiff} - mockQueue.DeleteErr = fakes.FakeError - tempFile, fileErr := ioutil.TempFile("", "log") - Expect(fileErr).NotTo(HaveOccurred()) - defer os.Remove(tempFile.Name()) - logrus.SetOutput(tempFile) - - go storageWatcher.Execute(time.Nanosecond, false) - - Eventually(func() (string, error) { - logContent, err := ioutil.ReadFile(tempFile.Name()) - return string(logContent), err - }).Should(ContainSubstring(fakes.FakeError.Error())) - close(done) - }) - }) - }) - - Describe("BackFill", func() { - var ( - mockBackFiller *mocks.BackFiller - mockTransformer2 *mocks.MockStorageTransformer - mockTransformer3 *mocks.MockStorageTransformer - createdPersistedDiff = utils.PersistedStorageDiff{ - ID: fakeDiffId, - StorageDiffInput: test_data.CreatedExpectedStorageDiff, - } - updatedPersistedDiff1 = 
utils.PersistedStorageDiff{ - ID: fakeDiffId, - StorageDiffInput: test_data.UpdatedExpectedStorageDiff, - } - deletedPersistedDiff = utils.PersistedStorageDiff{ - ID: fakeDiffId, - StorageDiffInput: test_data.DeletedExpectedStorageDiff, - } - updatedPersistedDiff2 = utils.PersistedStorageDiff{ - ID: fakeDiffId, - StorageDiffInput: test_data.UpdatedExpectedStorageDiff2, - } - csvDiff = utils.StorageDiffInput{ - HashedAddress: hashedAddress, - BlockHash: common.HexToHash("0xfedcba9876543210"), - BlockHeight: int(test_data.BlockNumber2.Int64()) + 1, - StorageKey: common.HexToHash("0xabcdef1234567890"), - StorageValue: common.HexToHash("0x9876543210abcdef"), - } - csvPersistedDiff = utils.PersistedStorageDiff{ - ID: fakeDiffId, - StorageDiffInput: csvDiff, - } - ) - - BeforeEach(func() { - mockBackFiller = new(mocks.BackFiller) - hashedAddress = utils.HexToKeccak256Hash("0x0123456789abcdef") - mockFetcher = mocks.NewStorageFetcher() - mockQueue = &mocks.MockStorageQueue{} - mockTransformer = &mocks.MockStorageTransformer{KeccakOfAddress: hashedAddress} - mockTransformer2 = &mocks.MockStorageTransformer{KeccakOfAddress: common.BytesToHash(test_data.ContractLeafKey[:])} - mockTransformer3 = &mocks.MockStorageTransformer{KeccakOfAddress: common.BytesToHash(test_data.AnotherContractLeafKey[:])} - mockStorageDiffRepository = &fakes.MockStorageDiffRepository{} - }) - - Describe("transforming streamed and backfilled storage diffs", func() { - BeforeEach(func() { - mockFetcher.DiffsToReturn = []utils.StorageDiffInput{csvDiff} - mockBackFiller.SetStorageDiffsToReturn([]utils.StorageDiffInput{ - test_data.CreatedExpectedStorageDiff, - test_data.UpdatedExpectedStorageDiff, - test_data.DeletedExpectedStorageDiff, - test_data.UpdatedExpectedStorageDiff2, - }) - mockQueue.DiffsToReturn = []utils.PersistedStorageDiff{} - storageWatcher = watcher.NewStorageWatcher(mockFetcher, test_config.NewTestDB(test_config.NewTestNode())) - storageWatcher.Queue = mockQueue - 
storageWatcher.AddTransformers([]transformer.StorageTransformerInitializer{ - mockTransformer.FakeTransformerInitializer, - mockTransformer2.FakeTransformerInitializer, - mockTransformer3.FakeTransformerInitializer, - }) - mockStorageDiffRepository.CreateReturnID = fakeDiffId - - storageWatcher.StorageDiffRepository = mockStorageDiffRepository - }) - - It("executes transformer for storage diffs received from fetcher and backfiller", func(done Done) { - go storageWatcher.BackFill(test_data.BlockNumber.Uint64(), mockBackFiller) - go storageWatcher.Execute(time.Hour, true) - - Eventually(func() int { - return len(mockTransformer.PassedDiffs) - }).Should(Equal(1)) - Eventually(func() int { - return len(mockTransformer2.PassedDiffs) - }).Should(Equal(1)) - Eventually(func() int { - return len(mockTransformer3.PassedDiffs) - }).Should(Equal(3)) - - Expect(mockBackFiller.PassedEndingBlock).To(Equal(uint64(test_data.BlockNumber2.Int64()))) - Expect(mockTransformer.PassedDiffs[0]).To(Equal(csvPersistedDiff)) - Expect(mockTransformer2.PassedDiffs[0]).To(Equal(createdPersistedDiff)) - Expect(mockTransformer3.PassedDiffs).To(ConsistOf(updatedPersistedDiff1, deletedPersistedDiff, updatedPersistedDiff2)) - close(done) - }) - - It("adds diffs to the queue if transformation fails", func(done Done) { - mockTransformer3.ExecuteErr = fakes.FakeError - go storageWatcher.BackFill(test_data.BlockNumber.Uint64(), mockBackFiller) - go storageWatcher.Execute(time.Hour, true) - - Eventually(func() int { - return len(mockTransformer.PassedDiffs) - }).Should(Equal(1)) - Eventually(func() int { - return len(mockTransformer2.PassedDiffs) - }).Should(Equal(1)) - Eventually(func() int { - return len(mockTransformer3.PassedDiffs) - }).Should(Equal(3)) - - Eventually(func() bool { - return mockQueue.AddCalled - }).Should(BeTrue()) - Eventually(func() []utils.PersistedStorageDiff { - if len(mockQueue.AddPassedDiffs) > 2 { - return mockQueue.AddPassedDiffs - } - return []utils.PersistedStorageDiff{} 
- }).Should(ConsistOf(updatedPersistedDiff1, deletedPersistedDiff, updatedPersistedDiff2)) - - Expect(mockBackFiller.PassedEndingBlock).To(Equal(uint64(test_data.BlockNumber2.Int64()))) - Expect(mockTransformer.PassedDiffs[0]).To(Equal(csvPersistedDiff)) - Expect(mockTransformer2.PassedDiffs[0]).To(Equal(createdPersistedDiff)) - Expect(mockTransformer3.PassedDiffs).To(ConsistOf(updatedPersistedDiff1, deletedPersistedDiff, updatedPersistedDiff2)) - close(done) - }) - - It("logs a backfill error", func(done Done) { - tempFile, fileErr := ioutil.TempFile("", "log") - Expect(fileErr).NotTo(HaveOccurred()) - defer os.Remove(tempFile.Name()) - logrus.SetOutput(tempFile) - - mockBackFiller.BackFillErrs = []error{ - nil, - nil, - nil, - errors.New("mock backfiller error"), - } - - go storageWatcher.BackFill(test_data.BlockNumber.Uint64(), mockBackFiller) - go storageWatcher.Execute(time.Hour, true) - - Eventually(func() int { - return len(mockTransformer.PassedDiffs) - }).Should(Equal(1)) - Eventually(func() int { - return len(mockTransformer2.PassedDiffs) - }).Should(Equal(1)) - Eventually(func() int { - return len(mockTransformer3.PassedDiffs) - }).Should(Equal(2)) - Eventually(func() (string, error) { - logContent, err := ioutil.ReadFile(tempFile.Name()) - return string(logContent), err - }).Should(ContainSubstring("mock backfiller error")) - close(done) - }) - - It("logs when backfill finishes", func(done Done) { - tempFile, fileErr := ioutil.TempFile("", "log") - Expect(fileErr).NotTo(HaveOccurred()) - defer os.Remove(tempFile.Name()) - logrus.SetOutput(tempFile) - - go storageWatcher.BackFill(test_data.BlockNumber.Uint64(), mockBackFiller) - go storageWatcher.Execute(time.Hour, true) - - Eventually(func() (string, error) { - logContent, err := ioutil.ReadFile(tempFile.Name()) - return string(logContent), err - }).Should(ContainSubstring("storage watcher backfill process has finished")) - close(done) - }) - }) - - Describe("transforms queued storage diffs", func() { - 
BeforeEach(func() { - mockQueue.DiffsToReturn = []utils.PersistedStorageDiff{ - csvPersistedDiff, - createdPersistedDiff, - updatedPersistedDiff1, - deletedPersistedDiff, - updatedPersistedDiff2, - } - storageWatcher = watcher.NewStorageWatcher(mockFetcher, test_config.NewTestDB(test_config.NewTestNode())) - storageWatcher.Queue = mockQueue - storageWatcher.AddTransformers([]transformer.StorageTransformerInitializer{ - mockTransformer.FakeTransformerInitializer, - mockTransformer2.FakeTransformerInitializer, - mockTransformer3.FakeTransformerInitializer, - }) - }) - - It("executes transformers on queued storage diffs", func(done Done) { - go storageWatcher.BackFill(test_data.BlockNumber.Uint64(), mockBackFiller) - go storageWatcher.Execute(time.Nanosecond, true) - - Eventually(func() int { - return len(mockTransformer.PassedDiffs) - }).Should(Equal(1)) - Eventually(func() int { - return len(mockTransformer2.PassedDiffs) - }).Should(Equal(1)) - Eventually(func() int { - return len(mockTransformer3.PassedDiffs) - }).Should(Equal(3)) - Eventually(func() bool { - return mockQueue.GetAllCalled - }).Should(BeTrue()) - expectedIDs := []int64{ - fakeDiffId, - fakeDiffId, - fakeDiffId, - fakeDiffId, - fakeDiffId, - } - Eventually(func() []int64 { - if len(mockQueue.DeletePassedIds) > 4 { - return mockQueue.DeletePassedIds - } - return []int64{} - }).Should(Equal(expectedIDs)) - - Expect(mockQueue.AddCalled).To(Not(BeTrue())) - Expect(len(mockQueue.DiffsToReturn)).To(Equal(0)) - Expect(mockTransformer.PassedDiffs[0]).To(Equal(csvPersistedDiff)) - Expect(mockTransformer2.PassedDiffs[0]).To(Equal(createdPersistedDiff)) - Expect(mockTransformer3.PassedDiffs).To(ConsistOf(updatedPersistedDiff1, deletedPersistedDiff, updatedPersistedDiff2)) - close(done) - }) - }) - }) -}) diff --git a/libraries/shared/watcher/watcher_suite_test.go b/libraries/shared/watcher/watcher_suite_test.go deleted file mode 100644 index f39d6770..00000000 --- 
a/libraries/shared/watcher/watcher_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package watcher_test - -import ( - "io/ioutil" - "testing" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/sirupsen/logrus" -) - -func TestShared(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Shared Watcher Suite") -} - -var _ = BeforeSuite(func() { - logrus.SetOutput(ioutil.Discard) -}) diff --git a/main.go b/main.go index ca125800..e85f6dce 100644 --- a/main.go +++ b/main.go @@ -1,7 +1,7 @@ package main import ( - "github.com/vulcanize/vulcanizedb/cmd" + "github.com/vulcanize/ipfs-chain-watcher/cmd" "github.com/sirupsen/logrus" ) diff --git a/pkg/super_node/btc/btc_suite_test.go b/pkg/btc/btc_suite_test.go similarity index 100% rename from pkg/super_node/btc/btc_suite_test.go rename to pkg/btc/btc_suite_test.go diff --git a/pkg/super_node/btc/cid_retriever.go b/pkg/btc/cid_retriever.go similarity index 98% rename from pkg/super_node/btc/cid_retriever.go rename to pkg/btc/cid_retriever.go index f097d642..9e3f4b2b 100644 --- a/pkg/super_node/btc/cid_retriever.go +++ b/pkg/btc/cid_retriever.go @@ -26,9 +26,9 @@ import ( "github.com/lib/pq" log "github.com/sirupsen/logrus" - utils "github.com/vulcanize/vulcanizedb/libraries/shared/utilities" - 
"github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" + "github.com/vulcanize/ipfs-chain-watcher/utils" ) // CIDRetriever satisfies the CIDRetriever interface for bitcoin diff --git a/pkg/super_node/btc/cleaner.go b/pkg/btc/cleaner.go similarity index 97% rename from pkg/super_node/btc/cleaner.go rename to pkg/btc/cleaner.go index 98591765..1b358c66 100644 --- a/pkg/super_node/btc/cleaner.go +++ b/pkg/btc/cleaner.go @@ -22,8 +22,8 @@ import ( "github.com/jmoiron/sqlx" "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // Cleaner satisfies the shared.Cleaner interface fo bitcoin diff --git a/pkg/super_node/btc/cleaner_test.go b/pkg/btc/cleaner_test.go similarity index 95% rename from pkg/super_node/btc/cleaner_test.go rename to pkg/btc/cleaner_test.go index 86efd607..e3fd1e07 100644 --- a/pkg/super_node/btc/cleaner_test.go +++ b/pkg/btc/cleaner_test.go @@ -23,21 +23,19 @@ import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/btc" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/btc" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) var ( // Block 0 // header variables - blockHash1 = crypto.Keccak256Hash([]byte{00, 02}) - blocKNumber1 = big.NewInt(0) - headerCid1 = "mockHeader1CID" - parentHash = crypto.Keccak256Hash([]byte{00, 01}) - totalDifficulty = "50000000000000000000" - reward = "5000000000000000000" - headerModel1 = btc.HeaderModel{ + blockHash1 = crypto.Keccak256Hash([]byte{00, 02}) + blocKNumber1 = big.NewInt(0) + headerCid1 = "mockHeader1CID" + parentHash = crypto.Keccak256Hash([]byte{00, 01}) + headerModel1 = btc.HeaderModel{ BlockHash: blockHash1.String(), BlockNumber: blocKNumber1.String(), ParentHash: parentHash.String(), diff --git a/pkg/super_node/btc/converter.go b/pkg/btc/converter.go similarity index 98% rename from pkg/super_node/btc/converter.go rename to pkg/btc/converter.go index 7559fa17..9f279478 100644 --- a/pkg/super_node/btc/converter.go +++ b/pkg/btc/converter.go @@ -23,7 +23,7 @@ import ( "github.com/btcsuite/btcd/chaincfg" "github.com/btcsuite/btcd/txscript" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // PayloadConverter satisfies the PayloadConverter interface for bitcoin diff --git a/pkg/super_node/btc/converter_test.go b/pkg/btc/converter_test.go similarity index 93% rename from pkg/super_node/btc/converter_test.go rename to pkg/btc/converter_test.go index c76ad6c6..2735bae7 100644 --- a/pkg/super_node/btc/converter_test.go +++ b/pkg/btc/converter_test.go @@ -21,8 +21,8 @@ import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/super_node/btc" - "github.com/vulcanize/vulcanizedb/pkg/super_node/btc/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/btc" + "github.com/vulcanize/ipfs-chain-watcher/pkg/btc/mocks" ) var _ = Describe("Converter", func() { diff --git a/pkg/super_node/btc/filterer.go b/pkg/btc/filterer.go similarity index 96% rename from pkg/super_node/btc/filterer.go rename to pkg/btc/filterer.go index 5fa55d68..6b21d0f2 100644 --- a/pkg/super_node/btc/filterer.go +++ b/pkg/btc/filterer.go @@ -23,9 +23,9 @@ import ( "github.com/multiformats/go-multihash" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // ResponseFilterer satisfies the ResponseFilterer interface for bitcoin diff --git a/pkg/super_node/btc/http_streamer.go b/pkg/btc/http_streamer.go similarity index 98% rename from pkg/super_node/btc/http_streamer.go rename to pkg/btc/http_streamer.go index a9085f83..f655c296 100644 --- a/pkg/super_node/btc/http_streamer.go +++ b/pkg/btc/http_streamer.go @@ -23,7 +23,7 @@ import ( "github.com/btcsuite/btcd/rpcclient" "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // HTTPPayloadStreamer satisfies the PayloadStreamer interface for bitcoin over http endpoints (since bitcoin core doesn't support websockets) diff --git a/pkg/super_node/btc/indexer.go b/pkg/btc/indexer.go similarity index 97% rename from pkg/super_node/btc/indexer.go rename to pkg/btc/indexer.go index ed869320..30c02a6d 100644 --- a/pkg/super_node/btc/indexer.go +++ b/pkg/btc/indexer.go @@ -24,8 +24,8 @@ import ( "github.com/jmoiron/sqlx" "github.com/lib/pq" - 
"github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) type CIDIndexer struct { diff --git a/pkg/super_node/btc/indexer_test.go b/pkg/btc/indexer_test.go similarity index 93% rename from pkg/super_node/btc/indexer_test.go rename to pkg/btc/indexer_test.go index 67daec73..4b7202b7 100644 --- a/pkg/super_node/btc/indexer_test.go +++ b/pkg/btc/indexer_test.go @@ -20,10 +20,10 @@ import ( . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/btc" - "github.com/vulcanize/vulcanizedb/pkg/super_node/btc/mocks" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/btc" + "github.com/vulcanize/ipfs-chain-watcher/pkg/btc/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) var _ = Describe("Indexer", func() { diff --git a/pkg/super_node/btc/ipld_fetcher.go b/pkg/btc/ipld_fetcher.go similarity index 97% rename from pkg/super_node/btc/ipld_fetcher.go rename to pkg/btc/ipld_fetcher.go index 06dccc71..328247bd 100644 --- a/pkg/super_node/btc/ipld_fetcher.go +++ b/pkg/btc/ipld_fetcher.go @@ -26,8 +26,8 @@ import ( "github.com/ipfs/go-cid" log "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) var ( diff --git a/pkg/super_node/btc/ipld_pg_fetcher.go b/pkg/btc/ipld_pg_fetcher.go similarity index 94% rename from pkg/super_node/btc/ipld_pg_fetcher.go rename to pkg/btc/ipld_pg_fetcher.go index 53ea63e3..4bcd5aa7 100644 --- a/pkg/super_node/btc/ipld_pg_fetcher.go +++ b/pkg/btc/ipld_pg_fetcher.go @@ -22,9 +22,9 @@ 
import ( "github.com/jmoiron/sqlx" log "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // IPLDPGFetcher satisfies the IPLDFetcher interface for ethereum diff --git a/pkg/super_node/btc/mocks/converter.go b/pkg/btc/mocks/converter.go similarity index 95% rename from pkg/super_node/btc/mocks/converter.go rename to pkg/btc/mocks/converter.go index 5ba7a096..b31a0f92 100644 --- a/pkg/super_node/btc/mocks/converter.go +++ b/pkg/btc/mocks/converter.go @@ -19,8 +19,8 @@ package mocks import ( "fmt" - "github.com/vulcanize/vulcanizedb/pkg/super_node/btc" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/btc" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // PayloadConverter is the underlying struct for the Converter interface diff --git a/pkg/super_node/btc/mocks/indexer.go b/pkg/btc/mocks/indexer.go similarity index 83% rename from pkg/super_node/btc/mocks/indexer.go rename to pkg/btc/mocks/indexer.go index 699b7076..d971bf1d 100644 --- a/pkg/super_node/btc/mocks/indexer.go +++ b/pkg/btc/mocks/indexer.go @@ -19,11 +19,8 @@ package mocks import ( "fmt" - "github.com/vulcanize/vulcanizedb/pkg/super_node/btc" - - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" - - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/btc" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // CIDIndexer is the underlying struct for the Indexer interface @@ -36,7 +33,7 @@ type CIDIndexer struct { func (repo *CIDIndexer) Index(cids shared.CIDsForIndexing) error { cidPayload, ok := cids.(*btc.CIDPayload) if !ok { - return fmt.Errorf("index expected cids type %T got %T", 
&btc.CIDPayload{}, cids) + return fmt.Errorf("index expected cids type %T got %T", &btc.CIDPayload{}, cids) } repo.PassedCIDPayload = append(repo.PassedCIDPayload, cidPayload) return repo.ReturnErr diff --git a/pkg/super_node/btc/mocks/publisher.go b/pkg/btc/mocks/publisher.go similarity index 95% rename from pkg/super_node/btc/mocks/publisher.go rename to pkg/btc/mocks/publisher.go index c9a7cc59..3f204d25 100644 --- a/pkg/super_node/btc/mocks/publisher.go +++ b/pkg/btc/mocks/publisher.go @@ -19,9 +19,9 @@ package mocks import ( "fmt" - "github.com/vulcanize/vulcanizedb/pkg/super_node/btc" + "github.com/vulcanize/ipfs-chain-watcher/pkg/btc" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // IPLDPublisher is the underlying struct for the Publisher interface diff --git a/pkg/super_node/btc/mocks/test_data.go b/pkg/btc/mocks/test_data.go similarity index 99% rename from pkg/super_node/btc/mocks/test_data.go rename to pkg/btc/mocks/test_data.go index 0d5645bf..f9e6f10e 100644 --- a/pkg/super_node/btc/mocks/test_data.go +++ b/pkg/btc/mocks/test_data.go @@ -25,7 +25,8 @@ import ( "github.com/btcsuite/btcd/txscript" "github.com/btcsuite/btcd/wire" "github.com/btcsuite/btcutil" - "github.com/vulcanize/vulcanizedb/pkg/super_node/btc" + + "github.com/vulcanize/ipfs-chain-watcher/pkg/btc" ) var ( diff --git a/pkg/super_node/btc/models.go b/pkg/btc/models.go similarity index 100% rename from pkg/super_node/btc/models.go rename to pkg/btc/models.go diff --git a/pkg/super_node/btc/payload_fetcher.go b/pkg/btc/payload_fetcher.go similarity index 97% rename from pkg/super_node/btc/payload_fetcher.go rename to pkg/btc/payload_fetcher.go index 80d63a93..66450883 100644 --- a/pkg/super_node/btc/payload_fetcher.go +++ b/pkg/btc/payload_fetcher.go @@ -23,7 +23,7 @@ import ( "github.com/btcsuite/btcd/wire" "github.com/btcsuite/btcutil" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + 
"github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // PayloadFetcher satisfies the PayloadFetcher interface for bitcoin diff --git a/pkg/super_node/btc/publish_and_indexer.go b/pkg/btc/publish_and_indexer.go similarity index 95% rename from pkg/super_node/btc/publish_and_indexer.go rename to pkg/btc/publish_and_indexer.go index 1b186b88..3c546a08 100644 --- a/pkg/super_node/btc/publish_and_indexer.go +++ b/pkg/btc/publish_and_indexer.go @@ -20,9 +20,9 @@ import ( "fmt" "strconv" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // IPLDPublisherAndIndexer satisfies the IPLDPublisher interface for bitcoin diff --git a/pkg/super_node/btc/publish_and_indexer_test.go b/pkg/btc/publish_and_indexer_test.go similarity index 94% rename from pkg/super_node/btc/publish_and_indexer_test.go rename to pkg/btc/publish_and_indexer_test.go index b989bb27..151b8d4f 100644 --- a/pkg/super_node/btc/publish_and_indexer_test.go +++ b/pkg/btc/publish_and_indexer_test.go @@ -25,12 +25,12 @@ import ( "github.com/multiformats/go-multihash" . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/btc" - "github.com/vulcanize/vulcanizedb/pkg/super_node/btc/mocks" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/btc" + "github.com/vulcanize/ipfs-chain-watcher/pkg/btc/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) var _ = Describe("PublishAndIndexer", func() { diff --git a/pkg/super_node/btc/publisher.go b/pkg/btc/publisher.go similarity index 94% rename from pkg/super_node/btc/publisher.go rename to pkg/btc/publisher.go index 8ab6bcde..5800c5b0 100644 --- a/pkg/super_node/btc/publisher.go +++ b/pkg/btc/publisher.go @@ -20,10 +20,10 @@ import ( "fmt" "strconv" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/dag_putters" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/dag_putters" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // IPLDPublisher satisfies the IPLDPublisher for ethereum diff --git a/pkg/super_node/btc/publisher_test.go b/pkg/btc/publisher_test.go similarity index 94% rename from pkg/super_node/btc/publisher_test.go rename to pkg/btc/publisher_test.go index 5d92286a..9dd813c9 100644 --- a/pkg/super_node/btc/publisher_test.go +++ b/pkg/btc/publisher_test.go @@ -23,9 +23,9 @@ import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - mocks2 "github.com/vulcanize/vulcanizedb/pkg/ipfs/mocks" - "github.com/vulcanize/vulcanizedb/pkg/super_node/btc" - "github.com/vulcanize/vulcanizedb/pkg/super_node/btc/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/btc" + "github.com/vulcanize/ipfs-chain-watcher/pkg/btc/mocks" + mocks2 "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/mocks" ) var ( diff --git a/pkg/super_node/btc/streamer.go b/pkg/btc/streamer.go similarity index 97% rename from pkg/super_node/btc/streamer.go rename to pkg/btc/streamer.go index 9c5f4839..3ca1e4a0 100644 --- a/pkg/super_node/btc/streamer.go +++ b/pkg/btc/streamer.go @@ -22,7 +22,7 @@ import ( "github.com/btcsuite/btcutil" "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) const ( diff --git a/pkg/super_node/btc/subscription_config.go b/pkg/btc/subscription_config.go similarity index 98% rename from pkg/super_node/btc/subscription_config.go rename to pkg/btc/subscription_config.go index 8105eb87..8028b22c 100644 --- a/pkg/super_node/btc/subscription_config.go +++ b/pkg/btc/subscription_config.go @@ -22,7 +22,7 @@ import ( "github.com/spf13/viper" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // SubscriptionSettings config is used by a subscriber to specify what bitcoin data to stream from the super node diff --git a/pkg/super_node/btc/test_helpers.go b/pkg/btc/test_helpers.go similarity index 96% rename from pkg/super_node/btc/test_helpers.go rename to pkg/btc/test_helpers.go index 01fa5af0..0a7f1d06 100644 --- a/pkg/super_node/btc/test_helpers.go +++ b/pkg/btc/test_helpers.go @@ -19,7 +19,7 @@ package btc import ( . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" ) // TearDownDB is used to tear down the super node dbs after tests diff --git a/pkg/super_node/btc/types.go b/pkg/btc/types.go similarity index 97% rename from pkg/super_node/btc/types.go rename to pkg/btc/types.go index 292984f3..09aec08a 100644 --- a/pkg/super_node/btc/types.go +++ b/pkg/btc/types.go @@ -19,7 +19,7 @@ package btc import ( "math/big" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" "github.com/btcsuite/btcd/wire" "github.com/btcsuite/btcutil" diff --git a/pkg/eth/client/eth_client.go b/pkg/client/eth_client.go similarity index 100% rename from pkg/eth/client/eth_client.go rename to pkg/client/eth_client.go diff --git a/pkg/eth/client/rpc_client.go b/pkg/client/rpc_client.go similarity index 100% rename from pkg/eth/client/rpc_client.go rename to pkg/client/rpc_client.go diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go index 0daa5e77..7591d836 100644 --- a/pkg/config/config_test.go +++ b/pkg/config/config_test.go @@ -38,7 +38,7 @@ var _ = Describe("Loading the config", func() { It("reads the private config using the environment", func() { viper.SetConfigName("config") - viper.AddConfigPath("$GOPATH/src/github.com/vulcanize/vulcanizedb/environments/") + viper.AddConfigPath("$GOPATH/src/github.com/vulcanize/ipfs-chain-watcher/environments/") Expect(viper.Get("client.ipcpath")).To(BeNil()) testConfig := viper.New() diff --git a/pkg/config/contract.go b/pkg/config/contract.go deleted file mode 100644 index 77f71678..00000000 --- a/pkg/config/contract.go +++ /dev/null @@ -1,218 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// 
(at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package config - -import ( - "strings" - - log "github.com/sirupsen/logrus" - "github.com/spf13/viper" - "github.com/vulcanize/vulcanizedb/pkg/eth" -) - -// Config struct for generic contract transformer -type ContractConfig struct { - // Name for the transformer - Name string - - // Ethereum network name; default "" is mainnet - Network string - - // List of contract addresses (map to ensure no duplicates) - Addresses map[string]bool - - // Map of contract address to abi - // If an address has no associated abi the parser will attempt to fetch one from etherscan - Abis map[string]string - - // Map of contract address to slice of events - // Used to set which addresses to watch - // If any events are listed in the slice only those will be watched - // Otherwise all events in the contract ABI are watched - Events map[string][]string - - // Map of contract address to slice of methods - // If any methods are listed in the slice only those will be polled - // Otherwise no methods will be polled - Methods map[string][]string - - // Map of contract address to slice of event arguments to filter for - // If arguments are provided then only events which emit those arguments are watched - // Otherwise arguments are not filtered on events - EventArgs map[string][]string - - // Map of contract address to slice of method arguments to limit polling to - // If arguments are provided then only those arguments are allowed as arguments in method polling - // Otherwise any argument of the right type seen emitted from events at that contract will be used in method polling - 
MethodArgs map[string][]string - - // Map of contract address to their starting block - StartingBlocks map[string]int64 - - // Map of contract address to whether or not to pipe method polling results forward into subsequent method calls - Piping map[string]bool -} - -func (contractConfig *ContractConfig) PrepConfig() { - addrs := viper.GetStringSlice("contract.addresses") - contractConfig.Network = viper.GetString("contract.network") - contractConfig.Addresses = make(map[string]bool, len(addrs)) - contractConfig.Abis = make(map[string]string, len(addrs)) - contractConfig.Methods = make(map[string][]string, len(addrs)) - contractConfig.Events = make(map[string][]string, len(addrs)) - contractConfig.MethodArgs = make(map[string][]string, len(addrs)) - contractConfig.EventArgs = make(map[string][]string, len(addrs)) - contractConfig.StartingBlocks = make(map[string]int64, len(addrs)) - contractConfig.Piping = make(map[string]bool, len(addrs)) - // De-dupe addresses - for _, addr := range addrs { - contractConfig.Addresses[strings.ToLower(addr)] = true - } - - // Iterate over addresses to pull out config info for each contract - for _, addr := range addrs { - transformer := viper.GetStringMap("contract." 
+ addr) - - // Get and check abi - var abi string - abiInterface, abiOK := transformer["abi"] - if !abiOK { - log.Warnf("contract %s not configured with an ABI, will attempt to fetch it from Etherscan\r\n", addr) - } else { - abi, abiOK = abiInterface.(string) - if !abiOK { - log.Fatal(addr, "transformer `abi` not of type []string") - } - } - if abi != "" { - if _, abiErr := eth.ParseAbi(abi); abiErr != nil { - log.Fatal(addr, "transformer `abi` not valid JSON") - } - } - contractConfig.Abis[strings.ToLower(addr)] = abi - - // Get and check events - events := make([]string, 0) - eventsInterface, eventsOK := transformer["events"] - if !eventsOK { - log.Warnf("contract %s not configured with a list of events to watch, will watch all events\r\n", addr) - events = []string{} - } else { - eventsI, eventsOK := eventsInterface.([]interface{}) - if !eventsOK { - log.Fatal(addr, "transformer `events` not of type []string\r\n") - } - for _, strI := range eventsI { - str, strOK := strI.(string) - if !strOK { - log.Fatal(addr, "transformer `events` not of type []string\r\n") - } - events = append(events, str) - } - } - contractConfig.Events[strings.ToLower(addr)] = events - - // Get and check methods - methods := make([]string, 0) - methodsInterface, methodsOK := transformer["methods"] - if !methodsOK { - log.Warnf("contract %s not configured with a list of methods to poll, will not poll any methods\r\n", addr) - methods = []string{} - } else { - methodsI, methodsOK := methodsInterface.([]interface{}) - if !methodsOK { - log.Fatal(addr, "transformer `methods` not of type []string\r\n") - } - for _, strI := range methodsI { - str, strOK := strI.(string) - if !strOK { - log.Fatal(addr, "transformer `methods` not of type []string\r\n") - } - methods = append(methods, str) - } - } - contractConfig.Methods[strings.ToLower(addr)] = methods - - // Get and check eventArgs - eventArgs := make([]string, 0) - eventArgsInterface, eventArgsOK := transformer["eventArgs"] - if !eventArgsOK { 
- log.Warnf("contract %s not configured with a list of event arguments to filter for, will not filter events for specific emitted values\r\n", addr) - eventArgs = []string{} - } else { - eventArgsI, eventArgsOK := eventArgsInterface.([]interface{}) - if !eventArgsOK { - log.Fatal(addr, "transformer `eventArgs` not of type []string\r\n") - } - for _, strI := range eventArgsI { - str, strOK := strI.(string) - if !strOK { - log.Fatal(addr, "transformer `eventArgs` not of type []string\r\n") - } - eventArgs = append(eventArgs, str) - } - } - contractConfig.EventArgs[strings.ToLower(addr)] = eventArgs - - // Get and check methodArgs - methodArgs := make([]string, 0) - methodArgsInterface, methodArgsOK := transformer["methodArgs"] - if !methodArgsOK { - log.Warnf("contract %s not configured with a list of method argument values to poll with, will poll methods with all available arguments\r\n", addr) - methodArgs = []string{} - } else { - methodArgsI, methodArgsOK := methodArgsInterface.([]interface{}) - if !methodArgsOK { - log.Fatal(addr, "transformer `methodArgs` not of type []string\r\n") - } - for _, strI := range methodArgsI { - str, strOK := strI.(string) - if !strOK { - log.Fatal(addr, "transformer `methodArgs` not of type []string\r\n") - } - methodArgs = append(methodArgs, str) - } - } - contractConfig.MethodArgs[strings.ToLower(addr)] = methodArgs - - // Get and check startingBlock - startInterface, startOK := transformer["startingblock"] - if !startOK { - log.Fatal(addr, "transformer config is missing `startingBlock` value\r\n") - } - start, startOK := startInterface.(int64) - if !startOK { - log.Fatal(addr, "transformer `startingBlock` not of type int\r\n") - } - contractConfig.StartingBlocks[strings.ToLower(addr)] = start - - // Get pipping - var piping bool - _, pipeOK := transformer["piping"] - if !pipeOK { - log.Warnf("contract %s does not have its `piping` set, by default piping is turned off\r\n", addr) - piping = false - } else { - pipingInterface := 
transformer["piping"] - piping, pipeOK = pipingInterface.(bool) - if !pipeOK { - log.Fatal(addr, "transformer `piping` not of type bool\r\n") - } - } - contractConfig.Piping[strings.ToLower(addr)] = piping - } -} diff --git a/pkg/config/plugin.go b/pkg/config/plugin.go deleted file mode 100644 index 92461fba..00000000 --- a/pkg/config/plugin.go +++ /dev/null @@ -1,167 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package config - -import ( - "errors" - "fmt" - "path/filepath" - "strings" - - "github.com/vulcanize/vulcanizedb/pkg/plugin/helpers" -) - -type Plugin struct { - Transformers map[string]Transformer - FilePath string - FileName string - Save bool - Home string -} - -type Transformer struct { - Path string - Type TransformerType - MigrationPath string - MigrationRank uint64 - RepositoryPath string -} - -func (pluginConfig *Plugin) GetPluginPaths() (string, string, error) { - path, err := helpers.CleanPath(pluginConfig.FilePath) - if err != nil { - return "", "", err - } - - name := strings.Split(pluginConfig.FileName, ".")[0] - goFile := filepath.Join(path, name+".go") - soFile := filepath.Join(path, name+".so") - - return goFile, soFile, nil -} - -// Removes duplicate migration paths and returns them in ranked order -func (pluginConfig *Plugin) GetMigrationsPaths() ([]string, error) { - paths := make(map[uint64]string) - highestRank := -1 - for name, transformer := range pluginConfig.Transformers { - repo := transformer.RepositoryPath - mig := transformer.MigrationPath - path := filepath.Join("$GOPATH/src", pluginConfig.Home, "vendor", repo, mig) - cleanPath, err := helpers.CleanPath(path) - if err != nil { - return nil, err - } - // If there is a different path with the same rank then we have a conflict - _, ok := paths[transformer.MigrationRank] - if ok { - conflictingPath := paths[transformer.MigrationRank] - if conflictingPath != cleanPath { - return nil, fmt.Errorf("transformer %s has the same migration rank (%d) as another transformer", name, transformer.MigrationRank) - } - } - paths[transformer.MigrationRank] = cleanPath - if int(transformer.MigrationRank) >= highestRank { - highestRank = int(transformer.MigrationRank) - } - } - // Check for gaps and duplicates - if len(paths) != (highestRank + 1) { - return []string{}, errors.New("number of distinct ranks does not match number of distinct migration paths") - } - if anyDupes(paths) { - return []string{}, 
errors.New("duplicate paths with different ranks present") - } - - sortedPaths := make([]string, len(paths)) - for rank, path := range paths { - sortedPaths[rank] = path - } - - return sortedPaths, nil -} - -// Removes duplicate repo paths before returning them -func (pluginConfig *Plugin) GetRepoPaths() map[string]bool { - paths := make(map[string]bool) - for _, transformer := range pluginConfig.Transformers { - paths[transformer.RepositoryPath] = true - } - - return paths -} - -type TransformerType int - -const ( - UnknownTransformerType TransformerType = iota - EthEvent - EthStorage - EthContract -) - -func (transformerType TransformerType) String() string { - names := [...]string{ - "Unknown", - "eth_event", - "eth_storage", - "eth_contract", - } - - if transformerType > EthContract || transformerType < EthEvent { - return "Unknown" - } - - return names[transformerType] -} - -func GetTransformerType(str string) TransformerType { - types := [...]TransformerType{ - EthEvent, - EthStorage, - EthContract, - } - - for _, ty := range types { - if ty.String() == str { - return ty - } - } - - return UnknownTransformerType -} - -func anyDupes(list map[uint64]string) bool { - seen := make([]string, 0, len(list)) - for _, str := range list { - dupe := inList(str, seen) - if dupe { - return true - } - seen = append(seen, str) - } - return false -} - -func inList(str string, list []string) bool { - for _, element := range list { - if str == element { - return true - } - } - return false -} diff --git a/pkg/config/plugin_test.go b/pkg/config/plugin_test.go deleted file mode 100644 index 7eeed236..00000000 --- a/pkg/config/plugin_test.go +++ /dev/null @@ -1,242 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package config_test - -import ( - "os" - "path/filepath" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/config" -) - -var allDifferentPathsConfig = config.Plugin{ - Transformers: map[string]config.Transformer{ - "transformer1": { - Path: "test/init/path", - Type: config.EthEvent, - MigrationPath: "test/migration/path1", - MigrationRank: 0, - RepositoryPath: "test/repo/path", - }, - "transformer2": { - Path: "test/init/path", - Type: config.EthEvent, - MigrationPath: "test/migration/path2", - MigrationRank: 2, - RepositoryPath: "test/repo/path", - }, - "transformer3": { - Path: "test/init/path2", - Type: config.EthEvent, - MigrationPath: "test/migration/path3", - MigrationRank: 1, - RepositoryPath: "test/repo/path", - }, - }, -} - -var overlappingPathsConfig = config.Plugin{ - Transformers: map[string]config.Transformer{ - "transformer1": { - Path: "test/init/path", - Type: config.EthEvent, - MigrationPath: "test/migration/path1", - MigrationRank: 0, - RepositoryPath: "test/repo/path", - }, - "transformer2": { - Path: "test/init/path", - Type: config.EthEvent, - MigrationPath: "test/migration/path1", - MigrationRank: 0, - RepositoryPath: "test/repo/path", - }, - "transformer3": { - Path: "test/init/path2", - Type: config.EthEvent, - MigrationPath: "test/migration/path3", - MigrationRank: 1, - RepositoryPath: "test/repo/path", - }, - }, -} - -var conflictErrorConfig = config.Plugin{ - Transformers: map[string]config.Transformer{ - "transformer1": { - Path: "test/init/path", - Type: config.EthEvent, - MigrationPath: "test/migration/path1", 
- MigrationRank: 0, - RepositoryPath: "test/repo/path", - }, - "transformer2": { - Path: "test/init/path", - Type: config.EthEvent, - MigrationPath: "test/migration/path2", - MigrationRank: 0, - RepositoryPath: "test/repo/path", - }, - "transformer3": { - Path: "test/init/path2", - Type: config.EthEvent, - MigrationPath: "test/migration/path3", - MigrationRank: 1, - RepositoryPath: "test/repo/path", - }, - }, -} - -var gapErrorConfig = config.Plugin{ - Transformers: map[string]config.Transformer{ - "transformer1": { - Path: "test/init/path", - Type: config.EthEvent, - MigrationPath: "test/migration/path1", - MigrationRank: 0, - RepositoryPath: "test/repo/path", - }, - "transformer2": { - Path: "test/init/path", - Type: config.EthEvent, - MigrationPath: "test/migration/path2", - MigrationRank: 3, - RepositoryPath: "test/repo/path", - }, - "transformer3": { - Path: "test/init/path2", - Type: config.EthEvent, - MigrationPath: "test/migration/path3", - MigrationRank: 1, - RepositoryPath: "test/repo/path", - }, - }, -} - -var missingRankErrorConfig = config.Plugin{ - Transformers: map[string]config.Transformer{ - "transformer1": { - Path: "test/init/path", - Type: config.EthEvent, - MigrationPath: "test/migration/path1", - MigrationRank: 0, - RepositoryPath: "test/repo/path", - }, - "transformer2": { - Path: "test/init/path", - Type: config.EthEvent, - MigrationPath: "test/migration/path2", - RepositoryPath: "test/repo/path", - }, - "transformer3": { - Path: "test/init/path2", - Type: config.EthEvent, - MigrationPath: "test/migration/path3", - MigrationRank: 1, - RepositoryPath: "test/repo/path", - }, - }, -} - -var duplicateErrorConfig = config.Plugin{ - Transformers: map[string]config.Transformer{ - "transformer1": { - Path: "test/init/path", - Type: config.EthEvent, - MigrationPath: "test/migration/path1", - MigrationRank: 0, - RepositoryPath: "test/repo/path", - }, - "transformer2": { - Path: "test/init/path", - Type: config.EthEvent, - MigrationPath: 
"test/migration/path1", - RepositoryPath: "test/repo/path", - MigrationRank: 2, - }, - "transformer3": { - Path: "test/init/path2", - Type: config.EthEvent, - MigrationPath: "test/migration/path3", - MigrationRank: 1, - RepositoryPath: "test/repo/path", - }, - }, -} - -var _ = Describe("GetMigrationsPaths", func() { - It("Sorts migration paths by rank", func() { - plugin := allDifferentPathsConfig - migrationPaths, err := plugin.GetMigrationsPaths() - Expect(err).ToNot(HaveOccurred()) - Expect(len(migrationPaths)).To(Equal(3)) - - env := os.Getenv("GOPATH") - path1 := filepath.Join(env, "src/vendor/test/repo/path/test/migration/path1") - path2 := filepath.Join(env, "src/vendor/test/repo/path/test/migration/path3") - path3 := filepath.Join(env, "src/vendor/test/repo/path/test/migration/path2") - expectedMigrationPaths := []string{path1, path2, path3} - Expect(migrationPaths).To(Equal(expectedMigrationPaths)) - }) - - It("Expects identical migration paths to have the same rank", func() { - plugin := overlappingPathsConfig - migrationPaths, err := plugin.GetMigrationsPaths() - Expect(err).ToNot(HaveOccurred()) - Expect(len(migrationPaths)).To(Equal(2)) - - env := os.Getenv("GOPATH") - path1 := filepath.Join(env, "src/vendor/test/repo/path/test/migration/path1") - path2 := filepath.Join(env, "src/vendor/test/repo/path/test/migration/path3") - expectedMigrationPaths := []string{path1, path2} - Expect(migrationPaths).To(Equal(expectedMigrationPaths)) - }) - - It("Fails if two different migration paths have the same rank", func() { - plugin := conflictErrorConfig - migrationPaths, err := plugin.GetMigrationsPaths() - Expect(err).To(HaveOccurred()) - Expect(len(migrationPaths)).To(Equal(0)) - Expect(err.Error()).To(ContainSubstring("has the same migration rank")) - }) - - It("Fails if there is a gap in the ranks of the migration paths", func() { - plugin := gapErrorConfig - migrationPaths, err := plugin.GetMigrationsPaths() - Expect(err).To(HaveOccurred()) - 
Expect(len(migrationPaths)).To(Equal(0)) - Expect(err.Error()).To(ContainSubstring("number of distinct ranks does not match number of distinct migration paths")) - }) - - It("Fails if a transformer is missing its rank", func() { - plugin := missingRankErrorConfig - migrationPaths, err := plugin.GetMigrationsPaths() - Expect(err).To(HaveOccurred()) - Expect(len(migrationPaths)).To(Equal(0)) - Expect(err.Error()).To(ContainSubstring("has the same migration rank")) - }) - - It("Fails if the same migration path has more than one rank", func() { - plugin := duplicateErrorConfig - migrationPaths, err := plugin.GetMigrationsPaths() - Expect(err).To(HaveOccurred()) - Expect(len(migrationPaths)).To(Equal(0)) - Expect(err.Error()).To(ContainSubstring("duplicate paths with different ranks present")) - }) -}) diff --git a/pkg/eth/core/eth_client.go b/pkg/core/eth_client.go similarity index 100% rename from pkg/eth/core/eth_client.go rename to pkg/core/eth_client.go diff --git a/pkg/eth/core/node_info.go b/pkg/core/node_info.go similarity index 100% rename from pkg/eth/core/node_info.go rename to pkg/core/node_info.go diff --git a/pkg/eth/core/rpc_client.go b/pkg/core/rpc_client.go similarity index 95% rename from pkg/eth/core/rpc_client.go rename to pkg/core/rpc_client.go index 5b69417f..0505b193 100644 --- a/pkg/eth/core/rpc_client.go +++ b/pkg/core/rpc_client.go @@ -21,7 +21,7 @@ import ( "github.com/ethereum/go-ethereum/rpc" - "github.com/vulcanize/vulcanizedb/pkg/eth/client" + "github.com/vulcanize/ipfs-chain-watcher/pkg/client" ) type RPCClient interface { diff --git a/pkg/eth/abi.go b/pkg/eth/abi.go deleted file mode 100644 index 403a9d70..00000000 --- a/pkg/eth/abi.go +++ /dev/null @@ -1,105 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// 
(at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package eth - -import ( - "errors" - "strings" - - "encoding/json" - "fmt" - "net/http" - "time" - - "github.com/ethereum/go-ethereum/accounts/abi" - "github.com/vulcanize/vulcanizedb/pkg/fs" -) - -var ( - ErrInvalidAbiFile = errors.New("invalid abi") - ErrMissingAbiFile = errors.New("missing abi") - ErrAPIRequestFailed = errors.New("etherscan api request failed") -) - -type Response struct { - Status string - Message string - Result string -} - -type EtherScanAPI struct { - client *http.Client - url string -} - -func NewEtherScanClient(url string) *EtherScanAPI { - return &EtherScanAPI{ - client: &http.Client{Timeout: 10 * time.Second}, - url: url, - } -} - -func GenURL(network string) string { - switch network { - case "ropsten": - return "https://ropsten.etherscan.io" - case "kovan": - return "https://kovan.etherscan.io" - case "rinkeby": - return "https://rinkeby.etherscan.io" - default: - return "https://api.etherscan.io" - } -} - -//https://api.etherscan.io/api?module=contract&action=getabi&address=%s -func (e *EtherScanAPI) GetAbi(contractHash string) (string, error) { - target := new(Response) - request := fmt.Sprintf("%s/api?module=contract&action=getabi&address=%s", e.url, contractHash) - r, err := e.client.Get(request) - if err != nil { - return "", ErrAPIRequestFailed - } - defer r.Body.Close() - err = json.NewDecoder(r.Body).Decode(&target) - return target.Result, err -} - -func ParseAbiFile(abiFilePath string) (abi.ABI, error) { - abiString, err := ReadAbiFile(abiFilePath) - if err != nil { - return abi.ABI{}, ErrMissingAbiFile - } - return 
ParseAbi(abiString) -} - -func ParseAbi(abiString string) (abi.ABI, error) { - parsedAbi, err := abi.JSON(strings.NewReader(abiString)) - if err != nil { - return abi.ABI{}, ErrInvalidAbiFile - } - return parsedAbi, nil -} - -func ReadAbiFile(abiFilePath string) (string, error) { - reader := fs.FsReader{} - filesBytes, err := reader.Read(abiFilePath) - if err != nil { - return "", ErrMissingAbiFile - } - return string(filesBytes), nil -} diff --git a/pkg/eth/abi_test.go b/pkg/eth/abi_test.go deleted file mode 100644 index ee0b84a8..00000000 --- a/pkg/eth/abi_test.go +++ /dev/null @@ -1,135 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package eth_test - -import ( - "net/http" - - "fmt" - - "github.com/ethereum/go-ethereum/accounts/abi" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - "github.com/onsi/gomega/ghttp" - "github.com/vulcanize/vulcanizedb/pkg/eth" - "github.com/vulcanize/vulcanizedb/test_config" -) - -var _ = Describe("ABI files", func() { - - Describe("Reading ABI files", func() { - - It("loads a valid ABI file", func() { - path := test_config.ABIFilePath + "valid_abi.json" - - contractAbi, err := eth.ParseAbiFile(path) - - Expect(contractAbi).NotTo(BeNil()) - Expect(err).To(BeNil()) - }) - - It("reads the contents of a valid ABI file", func() { - path := test_config.ABIFilePath + "valid_abi.json" - - contractAbi, err := eth.ReadAbiFile(path) - - Expect(contractAbi).To(Equal("[{\"foo\": \"bar\"}]")) - Expect(err).To(BeNil()) - }) - - It("returns an error when the file does not exist", func() { - path := test_config.ABIFilePath + "missing_abi.json" - - contractAbi, err := eth.ParseAbiFile(path) - - Expect(contractAbi).To(Equal(abi.ABI{})) - Expect(err).To(Equal(eth.ErrMissingAbiFile)) - }) - - It("returns an error when the file has invalid contents", func() { - path := test_config.ABIFilePath + "invalid_abi.json" - - contractAbi, err := eth.ParseAbiFile(path) - - Expect(contractAbi).To(Equal(abi.ABI{})) - Expect(err).To(Equal(eth.ErrInvalidAbiFile)) - }) - - Describe("Request ABI from endpoint", func() { - - var ( - server *ghttp.Server - client *eth.EtherScanAPI - abiString string - err error - ) - - BeforeEach(func() { - server = ghttp.NewServer() - client = eth.NewEtherScanClient(server.URL()) - path := test_config.ABIFilePath + "sample_abi.json" - abiString, err = eth.ReadAbiFile(path) - - Expect(err).NotTo(HaveOccurred()) - _, err = eth.ParseAbi(abiString) - Expect(err).NotTo(HaveOccurred()) - }) - - AfterEach(func() { - server.Close() - }) - - Describe("Fetching ABI from api (etherscan)", func() { - BeforeEach(func() { - - response := fmt.Sprintf(`{"status":"1","message":"OK","result":%q}`, abiString) - server.AppendHandlers( - ghttp.CombineHandlers( - ghttp.VerifyRequest("GET", "/api", 
"module=contract&action=getabi&address=0xd26114cd6EE289AccF82350c8d8487fedB8A0C07"), - ghttp.RespondWith(http.StatusOK, response), - ), - ) - }) - - It("should make a GET request with supplied contract hash", func() { - - abi, err := client.GetAbi("0xd26114cd6EE289AccF82350c8d8487fedB8A0C07") - Expect(server.ReceivedRequests()).Should(HaveLen(1)) - Expect(err).ShouldNot(HaveOccurred()) - Expect(abi).Should(Equal(abiString)) - }) - }) - }) - - Describe("Generating etherscan endpoints based on network", func() { - It("should return the main endpoint as the default", func() { - url := eth.GenURL("") - Expect(url).To(Equal("https://api.etherscan.io")) - }) - - It("generates various test network endpoint if test network is supplied", func() { - ropstenUrl := eth.GenURL("ropsten") - rinkebyUrl := eth.GenURL("rinkeby") - kovanUrl := eth.GenURL("kovan") - - Expect(ropstenUrl).To(Equal("https://ropsten.etherscan.io")) - Expect(kovanUrl).To(Equal("https://kovan.etherscan.io")) - Expect(rinkebyUrl).To(Equal("https://rinkeby.etherscan.io")) - }) - }) - }) -}) diff --git a/pkg/super_node/eth/api.go b/pkg/eth/api.go similarity index 99% rename from pkg/super_node/eth/api.go rename to pkg/eth/api.go index 9614d3fb..533cec8d 100644 --- a/pkg/super_node/eth/api.go +++ b/pkg/eth/api.go @@ -20,7 +20,7 @@ import ( "context" "math/big" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/common" diff --git a/pkg/super_node/eth/api_test.go b/pkg/eth/api_test.go similarity index 98% rename from pkg/super_node/eth/api_test.go rename to pkg/eth/api_test.go index 0c016486..c82a9726 100644 --- a/pkg/super_node/eth/api_test.go +++ b/pkg/eth/api_test.go @@ -30,10 +30,10 @@ import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth/mocks" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) var ( diff --git a/pkg/super_node/eth/backend.go b/pkg/eth/backend.go similarity index 99% rename from pkg/super_node/eth/backend.go rename to pkg/eth/backend.go index fd6ce01d..360e0136 100644 --- a/pkg/super_node/eth/backend.go +++ b/pkg/eth/backend.go @@ -22,15 +22,15 @@ import ( "fmt" "math/big" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/rlp" "github.com/ethereum/go-ethereum/rpc" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" ) var ( diff --git a/pkg/eth/blockchain.go b/pkg/eth/blockchain.go deleted file mode 100644 index 67f54c18..00000000 --- a/pkg/eth/blockchain.go +++ /dev/null @@ -1,279 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package eth - -import ( - "errors" - - "math/big" - "strconv" - - "github.com/ethereum/go-ethereum" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/ethereum/go-ethereum/core/types" - "github.com/sirupsen/logrus" - "golang.org/x/net/context" - - "github.com/vulcanize/vulcanizedb/pkg/eth/client" - vulcCommon "github.com/vulcanize/vulcanizedb/pkg/eth/converters/common" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -var ErrEmptyHeader = errors.New("empty header returned over RPC") - -const MAX_BATCH_SIZE = 100 - -type BlockChain struct { - blockConverter vulcCommon.BlockConverter - ethClient core.EthClient - headerConverter vulcCommon.HeaderConverter - node core.Node - rpcClient core.RPCClient - transactionConverter vulcCommon.TransactionConverter -} - -func NewBlockChain(ethClient core.EthClient, rpcClient core.RPCClient, node core.Node, converter vulcCommon.TransactionConverter) *BlockChain { - return &BlockChain{ - blockConverter: vulcCommon.NewBlockConverter(converter), - ethClient: ethClient, - headerConverter: vulcCommon.HeaderConverter{}, - node: node, - rpcClient: rpcClient, - transactionConverter: converter, - } -} - -func (blockChain *BlockChain) GetBlockByNumber(blockNumber int64) (block core.Block, err error) { - gethBlock, err := blockChain.ethClient.BlockByNumber(context.Background(), big.NewInt(blockNumber)) - logrus.Debugf("GetBlockByNumber called with block %d", blockNumber) - if err != nil { - return block, err - } - return blockChain.blockConverter.ToCoreBlock(gethBlock) -} - -func (blockChain *BlockChain) GetEthLogsWithCustomQuery(query ethereum.FilterQuery) ([]types.Log, error) { - gethLogs, err := blockChain.ethClient.FilterLogs(context.Background(), query) - 
logrus.Debug("GetEthLogsWithCustomQuery called") - if err != nil { - return []types.Log{}, err - } - return gethLogs, nil -} - -func (blockChain *BlockChain) GetHeaderByNumber(blockNumber int64) (header core.Header, err error) { - logrus.Debugf("GetHeaderByNumber called with block %d", blockNumber) - if blockChain.node.NetworkID == string(core.KOVAN_NETWORK_ID) { - return blockChain.getPOAHeader(blockNumber) - } - return blockChain.getPOWHeader(blockNumber) -} - -func (blockChain *BlockChain) GetHeadersByNumbers(blockNumbers []int64) (header []core.Header, err error) { - logrus.Debug("GetHeadersByNumbers called") - if blockChain.node.NetworkID == string(core.KOVAN_NETWORK_ID) { - return blockChain.getPOAHeaders(blockNumbers) - } - return blockChain.getPOWHeaders(blockNumbers) -} - -func (blockChain *BlockChain) GetFullSyncLogs(contract core.Contract, startingBlockNumber, endingBlockNumber *big.Int) ([]core.FullSyncLog, error) { - if endingBlockNumber == nil { - endingBlockNumber = startingBlockNumber - } - contractAddress := common.HexToAddress(contract.Hash) - fc := ethereum.FilterQuery{ - FromBlock: startingBlockNumber, - ToBlock: endingBlockNumber, - Addresses: []common.Address{contractAddress}, - Topics: nil, - } - gethLogs, err := blockChain.GetEthLogsWithCustomQuery(fc) - if err != nil { - return []core.FullSyncLog{}, err - } - logs := vulcCommon.ToFullSyncLogs(gethLogs) - return logs, nil -} - -func (blockChain *BlockChain) GetTransactions(transactionHashes []common.Hash) ([]core.TransactionModel, error) { - numTransactions := len(transactionHashes) - var batch []client.BatchElem - transactions := make([]core.RPCTransaction, numTransactions) - - for index, transactionHash := range transactionHashes { - batchElem := client.BatchElem{ - Method: "eth_getTransactionByHash", - Result: &transactions[index], - Args: []interface{}{transactionHash}, - } - batch = append(batch, batchElem) - } - - rpcErr := blockChain.rpcClient.BatchCall(batch) - if rpcErr != nil { - 
return []core.TransactionModel{}, rpcErr - } - - return blockChain.transactionConverter.ConvertRPCTransactionsToModels(transactions) -} - -func (blockChain *BlockChain) LastBlock() (*big.Int, error) { - block, err := blockChain.ethClient.HeaderByNumber(context.Background(), nil) - if err != nil { - return big.NewInt(0), err - } - return block.Number, err -} - -func (blockChain *BlockChain) Node() core.Node { - return blockChain.node -} - -func (blockChain *BlockChain) getPOAHeader(blockNumber int64) (header core.Header, err error) { - var POAHeader core.POAHeader - blockNumberArg := hexutil.EncodeBig(big.NewInt(blockNumber)) - includeTransactions := false - err = blockChain.rpcClient.CallContext(context.Background(), &POAHeader, "eth_getBlockByNumber", blockNumberArg, includeTransactions) - if err != nil { - return header, err - } - if POAHeader.Number == nil { - return header, ErrEmptyHeader - } - return blockChain.headerConverter.Convert(&types.Header{ - ParentHash: POAHeader.ParentHash, - UncleHash: POAHeader.UncleHash, - Coinbase: POAHeader.Coinbase, - Root: POAHeader.Root, - TxHash: POAHeader.TxHash, - ReceiptHash: POAHeader.ReceiptHash, - Bloom: POAHeader.Bloom, - Difficulty: POAHeader.Difficulty.ToInt(), - Number: POAHeader.Number.ToInt(), - GasLimit: uint64(POAHeader.GasLimit), - GasUsed: uint64(POAHeader.GasUsed), - Time: uint64(POAHeader.Time), - Extra: POAHeader.Extra, - }, POAHeader.Hash.String()), nil -} - -func (blockChain *BlockChain) getPOAHeaders(blockNumbers []int64) (headers []core.Header, err error) { - - var batch []client.BatchElem - var POAHeaders [MAX_BATCH_SIZE]core.POAHeader - includeTransactions := false - - for index, blockNumber := range blockNumbers { - - if index >= MAX_BATCH_SIZE { - break - } - - blockNumberArg := hexutil.EncodeBig(big.NewInt(blockNumber)) - - batchElem := client.BatchElem{ - Method: "eth_getBlockByNumber", - Result: &POAHeaders[index], - Args: []interface{}{blockNumberArg, includeTransactions}, - } - - batch = 
append(batch, batchElem) - } - - err = blockChain.rpcClient.BatchCall(batch) - if err != nil { - return headers, err - } - - for _, POAHeader := range POAHeaders { - var header core.Header - //Header.Number of the newest block will return nil. - if _, err := strconv.ParseUint(POAHeader.Number.ToInt().String(), 16, 64); err == nil { - header = blockChain.headerConverter.Convert(&types.Header{ - ParentHash: POAHeader.ParentHash, - UncleHash: POAHeader.UncleHash, - Coinbase: POAHeader.Coinbase, - Root: POAHeader.Root, - TxHash: POAHeader.TxHash, - ReceiptHash: POAHeader.ReceiptHash, - Bloom: POAHeader.Bloom, - Difficulty: POAHeader.Difficulty.ToInt(), - Number: POAHeader.Number.ToInt(), - GasLimit: uint64(POAHeader.GasLimit), - GasUsed: uint64(POAHeader.GasUsed), - Time: uint64(POAHeader.Time), - Extra: POAHeader.Extra, - }, POAHeader.Hash.String()) - - headers = append(headers, header) - } - } - - return headers, err -} - -func (blockChain *BlockChain) getPOWHeader(blockNumber int64) (header core.Header, err error) { - gethHeader, err := blockChain.ethClient.HeaderByNumber(context.Background(), big.NewInt(blockNumber)) - if err != nil { - return header, err - } - return blockChain.headerConverter.Convert(gethHeader, gethHeader.Hash().String()), nil -} - -func (blockChain *BlockChain) getPOWHeaders(blockNumbers []int64) (headers []core.Header, err error) { - var batch []client.BatchElem - var POWHeaders [MAX_BATCH_SIZE]types.Header - includeTransactions := false - - for index, blockNumber := range blockNumbers { - - if index >= MAX_BATCH_SIZE { - break - } - - blockNumberArg := hexutil.EncodeBig(big.NewInt(blockNumber)) - - batchElem := client.BatchElem{ - Method: "eth_getBlockByNumber", - Result: &POWHeaders[index], - Args: []interface{}{blockNumberArg, includeTransactions}, - } - - batch = append(batch, batchElem) - } - - err = blockChain.rpcClient.BatchCall(batch) - if err != nil { - return headers, err - } - - for _, POWHeader := range POWHeaders { - if 
POWHeader.Number != nil { - header := blockChain.headerConverter.Convert(&POWHeader, POWHeader.Hash().String()) - headers = append(headers, header) - } - } - - return headers, err -} - -func (blockChain *BlockChain) GetAccountBalance(address common.Address, blockNumber *big.Int) (*big.Int, error) { - logrus.Debug("GetAccountBalance called") - return blockChain.ethClient.BalanceAt(context.Background(), address, blockNumber) -} diff --git a/pkg/eth/blockchain_test.go b/pkg/eth/blockchain_test.go deleted file mode 100644 index bfd793cd..00000000 --- a/pkg/eth/blockchain_test.go +++ /dev/null @@ -1,272 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package eth_test - -import ( - "context" - "errors" - "math/big" - - "github.com/ethereum/go-ethereum" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/ethereum/go-ethereum/core/types" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth" - vulcCore "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" -) - -var _ = Describe("Geth blockchain", func() { - var ( - mockClient *fakes.MockEthClient - blockChain *eth.BlockChain - mockRpcClient *fakes.MockRPCClient - mockTransactionConverter *fakes.MockTransactionConverter - node vulcCore.Node - ) - - BeforeEach(func() { - mockClient = fakes.NewMockEthClient() - mockRpcClient = fakes.NewMockRPCClient() - mockTransactionConverter = fakes.NewMockTransactionConverter() - node = vulcCore.Node{} - blockChain = eth.NewBlockChain(mockClient, mockRpcClient, node, mockTransactionConverter) - }) - - Describe("getting a block", func() { - It("fetches block from ethClient", func() { - mockClient.SetBlockByNumberReturnBlock(types.NewBlockWithHeader(&types.Header{})) - blockNumber := int64(100) - - _, err := blockChain.GetBlockByNumber(blockNumber) - - Expect(err).NotTo(HaveOccurred()) - mockClient.AssertBlockByNumberCalledWith(context.Background(), big.NewInt(blockNumber)) - }) - - It("returns err if ethClient returns err", func() { - mockClient.SetBlockByNumberErr(fakes.FakeError) - - _, err := blockChain.GetBlockByNumber(100) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - }) - - Describe("getting a header", func() { - Describe("default/mainnet", func() { - It("fetches header from ethClient", func() { - blockNumber := int64(100) - mockClient.SetHeaderByNumberReturnHeader(&types.Header{Number: big.NewInt(blockNumber)}) - - _, err := blockChain.GetHeaderByNumber(blockNumber) - - Expect(err).NotTo(HaveOccurred()) - mockClient.AssertHeaderByNumberCalledWith(context.Background(), big.NewInt(blockNumber)) - }) - - It("returns err if ethClient returns err", func() { - mockClient.SetHeaderByNumberErr(fakes.FakeError) - - _, err := blockChain.GetHeaderByNumber(100) - - Expect(err).To(HaveOccurred()) - 
Expect(err).To(MatchError(fakes.FakeError)) - }) - - It("fetches headers with multiple blocks", func() { - _, err := blockChain.GetHeadersByNumbers([]int64{100, 99}) - - Expect(err).NotTo(HaveOccurred()) - mockRpcClient.AssertBatchCalledWith("eth_getBlockByNumber", 2) - }) - }) - - Describe("POA/Kovan", func() { - It("fetches header from rpcClient", func() { - node.NetworkID = string(vulcCore.KOVAN_NETWORK_ID) - blockNumber := hexutil.Big(*big.NewInt(100)) - mockRpcClient.SetReturnPOAHeader(vulcCore.POAHeader{Number: &blockNumber}) - blockChain = eth.NewBlockChain(mockClient, mockRpcClient, node, fakes.NewMockTransactionConverter()) - - _, err := blockChain.GetHeaderByNumber(100) - - Expect(err).NotTo(HaveOccurred()) - mockRpcClient.AssertCallContextCalledWith(context.Background(), &vulcCore.POAHeader{}, "eth_getBlockByNumber") - }) - - It("returns err if rpcClient returns err", func() { - node.NetworkID = string(vulcCore.KOVAN_NETWORK_ID) - mockRpcClient.SetCallContextErr(fakes.FakeError) - blockChain = eth.NewBlockChain(mockClient, mockRpcClient, node, fakes.NewMockTransactionConverter()) - - _, err := blockChain.GetHeaderByNumber(100) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - - It("returns error if returned header is empty", func() { - node.NetworkID = string(vulcCore.KOVAN_NETWORK_ID) - blockChain = eth.NewBlockChain(mockClient, mockRpcClient, node, fakes.NewMockTransactionConverter()) - - _, err := blockChain.GetHeaderByNumber(100) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(eth.ErrEmptyHeader)) - }) - - It("returns multiple headers with multiple blocknumbers", func() { - node.NetworkID = string(vulcCore.KOVAN_NETWORK_ID) - blockNumber := hexutil.Big(*big.NewInt(100)) - mockRpcClient.SetReturnPOAHeaders([]vulcCore.POAHeader{{Number: &blockNumber}}) - - _, err := blockChain.GetHeadersByNumbers([]int64{100, 99}) - - Expect(err).NotTo(HaveOccurred()) - 
mockRpcClient.AssertBatchCalledWith("eth_getBlockByNumber", 2) - }) - }) - }) - - Describe("getting logs with default FilterQuery", func() { - It("fetches logs from ethClient", func() { - mockClient.SetFilterLogsReturnLogs([]types.Log{{}}) - contract := vulcCore.Contract{Hash: common.BytesToHash([]byte{1, 2, 3, 4, 5}).Hex()} - startingBlockNumber := big.NewInt(1) - endingBlockNumber := big.NewInt(2) - - _, err := blockChain.GetFullSyncLogs(contract, startingBlockNumber, endingBlockNumber) - - Expect(err).NotTo(HaveOccurred()) - expectedQuery := ethereum.FilterQuery{ - FromBlock: startingBlockNumber, - ToBlock: endingBlockNumber, - Addresses: []common.Address{common.HexToAddress(contract.Hash)}, - } - mockClient.AssertFilterLogsCalledWith(context.Background(), expectedQuery) - }) - - It("returns err if ethClient returns err", func() { - mockClient.SetFilterLogsErr(fakes.FakeError) - contract := vulcCore.Contract{Hash: common.BytesToHash([]byte{1, 2, 3, 4, 5}).Hex()} - startingBlockNumber := big.NewInt(1) - endingBlockNumber := big.NewInt(2) - - _, err := blockChain.GetFullSyncLogs(contract, startingBlockNumber, endingBlockNumber) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - }) - - Describe("getting logs with a custom FilterQuery", func() { - It("fetches logs from ethClient", func() { - mockClient.SetFilterLogsReturnLogs([]types.Log{{}}) - address := common.HexToAddress("0x") - startingBlockNumber := big.NewInt(1) - endingBlockNumber := big.NewInt(2) - topic := common.HexToHash("0x") - query := ethereum.FilterQuery{ - FromBlock: startingBlockNumber, - ToBlock: endingBlockNumber, - Addresses: []common.Address{address}, - Topics: [][]common.Hash{{topic}}, - } - - _, err := blockChain.GetEthLogsWithCustomQuery(query) - - Expect(err).NotTo(HaveOccurred()) - mockClient.AssertFilterLogsCalledWith(context.Background(), query) - }) - - It("returns err if ethClient returns err", func() { - mockClient.SetFilterLogsErr(fakes.FakeError) 
- contract := vulcCore.Contract{Hash: common.BytesToHash([]byte{1, 2, 3, 4, 5}).Hex()} - startingBlockNumber := big.NewInt(1) - endingBlockNumber := big.NewInt(2) - query := ethereum.FilterQuery{ - FromBlock: startingBlockNumber, - ToBlock: endingBlockNumber, - Addresses: []common.Address{common.HexToAddress(contract.Hash)}, - Topics: nil, - } - - _, err := blockChain.GetEthLogsWithCustomQuery(query) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - }) - - Describe("getting transactions", func() { - It("fetches transaction for each hash", func() { - _, err := blockChain.GetTransactions([]common.Hash{{}, {}}) - - Expect(err).NotTo(HaveOccurred()) - mockRpcClient.AssertBatchCalledWith("eth_getTransactionByHash", 2) - }) - - It("converts transaction indexes from hex to int", func() { - _, err := blockChain.GetTransactions([]common.Hash{{}, {}}) - - Expect(err).NotTo(HaveOccurred()) - Expect(mockTransactionConverter.ConvertHeaderTransactionIndexToIntCalled).To(BeTrue()) - }) - }) - - Describe("getting the most recent block number", func() { - It("fetches latest header from ethClient", func() { - blockNumber := int64(100) - mockClient.SetHeaderByNumberReturnHeader(&types.Header{Number: big.NewInt(blockNumber)}) - - result, err := blockChain.LastBlock() - Expect(err).NotTo(HaveOccurred()) - - mockClient.AssertHeaderByNumberCalledWith(context.Background(), nil) - Expect(result).To(Equal(big.NewInt(blockNumber))) - }) - }) - - Describe("getting an account balance", func() { - It("fetches the balance for a given account address at a given block height", func() { - balance := big.NewInt(100000) - mockClient.SetBalanceAt(balance) - - result, err := blockChain.GetAccountBalance(common.HexToAddress("0x40"), big.NewInt(100)) - Expect(err).NotTo(HaveOccurred()) - - mockClient.AssertBalanceAtCalled(context.Background(), common.HexToAddress("0x40"), big.NewInt(100)) - Expect(result).To(Equal(balance)) - }) - - It("fails if the client returns an 
error", func() { - balance := big.NewInt(100000) - mockClient.SetBalanceAt(balance) - setErr := errors.New("testError") - mockClient.SetBalanceAtErr(setErr) - - _, err := blockChain.GetAccountBalance(common.HexToAddress("0x40"), big.NewInt(100)) - Expect(err).To(HaveOccurred()) - Expect(err).To(Equal(setErr)) - }) - }) -}) diff --git a/pkg/super_node/eth/cid_retriever.go b/pkg/eth/cid_retriever.go similarity index 99% rename from pkg/super_node/eth/cid_retriever.go rename to pkg/eth/cid_retriever.go index d87541c7..f24f6c0c 100644 --- a/pkg/super_node/eth/cid_retriever.go +++ b/pkg/eth/cid_retriever.go @@ -27,9 +27,9 @@ import ( "github.com/lib/pq" log "github.com/sirupsen/logrus" - utils "github.com/vulcanize/vulcanizedb/libraries/shared/utilities" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" + "github.com/vulcanize/ipfs-chain-watcher/utils" ) // CIDRetriever satisfies the CIDRetriever interface for ethereum diff --git a/pkg/super_node/eth/cid_retriever_test.go b/pkg/eth/cid_retriever_test.go similarity index 98% rename from pkg/super_node/eth/cid_retriever_test.go rename to pkg/eth/cid_retriever_test.go index ecbf7311..50d39071 100644 --- a/pkg/super_node/eth/cid_retriever_test.go +++ b/pkg/eth/cid_retriever_test.go @@ -24,11 +24,11 @@ import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - eth2 "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth/mocks" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + eth2 "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) var ( diff --git a/pkg/super_node/eth/cleaner.go b/pkg/eth/cleaner.go similarity index 98% rename from pkg/super_node/eth/cleaner.go rename to pkg/eth/cleaner.go index 9ef11854..a08f9694 100644 --- a/pkg/super_node/eth/cleaner.go +++ b/pkg/eth/cleaner.go @@ -22,8 +22,8 @@ import ( "github.com/jmoiron/sqlx" "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // Cleaner satisfies the shared.Cleaner interface fo ethereum diff --git a/pkg/super_node/eth/cleaner_test.go b/pkg/eth/cleaner_test.go similarity index 96% rename from pkg/super_node/eth/cleaner_test.go rename to pkg/eth/cleaner_test.go index b7b8ad2f..1359caa7 100644 --- a/pkg/super_node/eth/cleaner_test.go +++ b/pkg/eth/cleaner_test.go @@ -24,10 +24,9 @@ import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - eth2 "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) var ( @@ -39,7 +38,7 @@ var ( parentHash = crypto.Keccak256Hash([]byte{00, 01}) totalDifficulty = "50000000000000000000" reward = "5000000000000000000" - headerModel = eth2.HeaderModel{ + headerModel = eth.HeaderModel{ BlockHash: blockHash1.String(), BlockNumber: blocKNumber1.String(), CID: headerCID1, @@ -55,7 +54,7 @@ var ( tx2Hash = crypto.Keccak256Hash([]byte{01, 02}) txSrc = common.HexToAddress("0x010a") txDst = common.HexToAddress("0x020a") - txModels1 = []eth2.TxModel{ + txModels1 = []eth.TxModel{ { CID: tx1CID, TxHash: tx1Hash.String(), @@ -73,7 +72,7 @@ var ( uncleHash = crypto.Keccak256Hash([]byte{02, 02}) uncleParentHash = crypto.Keccak256Hash([]byte{02, 01}) uncleReward = "1000000000000000000" - uncleModels1 = []eth2.UncleModel{ + uncleModels1 = []eth.UncleModel{ { CID: uncleCID, Reward: uncleReward, @@ -87,7 +86,7 @@ var ( rct2CID = "mockRct2CID" rct1Contract = common.Address{} rct2Contract = common.HexToAddress("0x010c") - receiptModels1 = map[common.Hash]eth2.ReceiptModel{ + receiptModels1 = map[common.Hash]eth.ReceiptModel{ tx1Hash: { CID: rct1CID, ContractHash: crypto.Keccak256Hash(rct1Contract.Bytes()).String(), @@ -105,7 +104,7 @@ var ( state2CID1 = "mockState2CID1" state2Path = []byte{'\x02'} state2Key = crypto.Keccak256Hash(txDst.Bytes()) - stateModels1 = []eth2.StateNodeModel{ + stateModels1 = []eth.StateNodeModel{ { CID: state1CID1, Path: state1Path, @@ -124,7 +123,7 @@ var ( storageCID = "mockStorageCID1" storagePath = []byte{'\x01'} storageKey = 
crypto.Keccak256Hash(common.Hex2Bytes("0x0000000000000000000000000000000000000000000000000000000000000000")) - storageModels1 = map[string][]eth2.StorageNodeModel{ + storageModels1 = map[string][]eth.StorageNodeModel{ common.Bytes2Hex(state1Path): { { CID: storageCID, @@ -148,7 +147,7 @@ var ( blockHash2 = crypto.Keccak256Hash([]byte{00, 03}) blocKNumber2 = big.NewInt(1) headerCID2 = "mockHeaderCID2" - headerModel2 = eth2.HeaderModel{ + headerModel2 = eth.HeaderModel{ BlockHash: blockHash2.String(), BlockNumber: blocKNumber2.String(), CID: headerCID2, @@ -159,7 +158,7 @@ var ( // tx variables tx3CID = "mockTx3CID" tx3Hash = crypto.Keccak256Hash([]byte{01, 03}) - txModels2 = []eth2.TxModel{ + txModels2 = []eth.TxModel{ { CID: tx3CID, TxHash: tx3Hash.String(), @@ -168,7 +167,7 @@ var ( } // receipt variables rct3CID = "mockRct3CID" - receiptModels2 = map[common.Hash]eth2.ReceiptModel{ + receiptModels2 = map[common.Hash]eth.ReceiptModel{ tx3Hash: { CID: rct3CID, ContractHash: crypto.Keccak256Hash(rct1Contract.Bytes()).String(), @@ -177,7 +176,7 @@ var ( // state variables state1CID2 = "mockState1CID2" - stateModels2 = []eth2.StateNodeModel{ + stateModels2 = []eth.StateNodeModel{ { CID: state1CID2, Path: state1Path, @@ -213,15 +212,15 @@ var ( var _ = Describe("Cleaner", func() { var ( db *postgres.DB - repo *eth2.CIDIndexer - cleaner *eth2.Cleaner + repo *eth.CIDIndexer + cleaner *eth.Cleaner ) BeforeEach(func() { var err error db, err = shared.SetupDB() Expect(err).ToNot(HaveOccurred()) - repo = eth2.NewCIDIndexer(db) - cleaner = eth2.NewCleaner(db) + repo = eth.NewCIDIndexer(db) + cleaner = eth.NewCleaner(db) }) Describe("Clean", func() { BeforeEach(func() { diff --git a/pkg/eth/contract.go b/pkg/eth/contract.go deleted file mode 100644 index 768ccb12..00000000 --- a/pkg/eth/contract.go +++ /dev/null @@ -1,61 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the 
GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package eth - -import ( - "context" - "errors" - "math/big" - - "github.com/ethereum/go-ethereum" - "github.com/ethereum/go-ethereum/common" -) - -var ( - ErrInvalidStateAttribute = errors.New("invalid state attribute") -) - -func (blockChain *BlockChain) FetchContractData(abiJSON string, address string, method string, methodArgs []interface{}, result interface{}, blockNumber int64) error { - parsed, err := ParseAbi(abiJSON) - if err != nil { - return err - } - var input []byte - if methodArgs != nil { - input, err = parsed.Pack(method, methodArgs...) 
- } else { - input, err = parsed.Pack(method) - } - if err != nil { - return err - } - var bn *big.Int - if blockNumber > 0 { - bn = big.NewInt(blockNumber) - } - output, err := blockChain.callContract(address, input, bn) - if err != nil { - return err - } - return parsed.Unpack(result, method, output) -} - -func (blockChain *BlockChain) callContract(contractHash string, input []byte, blockNumber *big.Int) ([]byte, error) { - to := common.HexToAddress(contractHash) - msg := ethereum.CallMsg{To: &to, Data: input} - return blockChain.ethClient.CallContract(context.Background(), msg, blockNumber) -} diff --git a/pkg/eth/contract_watcher/header/converter/converter.go b/pkg/eth/contract_watcher/header/converter/converter.go deleted file mode 100644 index 534451df..00000000 --- a/pkg/eth/contract_watcher/header/converter/converter.go +++ /dev/null @@ -1,227 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package converter - -import ( - "encoding/json" - "fmt" - "math/big" - "strconv" - - "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" - gethTypes "github.com/ethereum/go-ethereum/core/types" - - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types" -) - -// ConverterInterface is the interface for converting geth logs to our custom log type -type ConverterInterface interface { - Convert(logs []gethTypes.Log, event types.Event, headerID int64) ([]types.Log, error) - ConvertBatch(logs []gethTypes.Log, events map[string]types.Event, headerID int64) (map[string][]types.Log, error) - Update(info *contract.Contract) -} - -// Converter is the underlying struct for the ConverterInterface -type Converter struct { - ContractInfo *contract.Contract -} - -// Update is used to configure the converter with a specific contract -func (c *Converter) Update(info *contract.Contract) { - c.ContractInfo = info -} - -// Convert the given watched event log into a types.Log for the given event -func (c *Converter) Convert(logs []gethTypes.Log, event types.Event, headerID int64) ([]types.Log, error) { - boundContract := bind.NewBoundContract(common.HexToAddress(c.ContractInfo.Address), c.ContractInfo.ParsedAbi, nil, nil, nil) - returnLogs := make([]types.Log, 0, len(logs)) - for _, log := range logs { - values := make(map[string]interface{}) - for _, field := range event.Fields { - var i interface{} - values[field.Name] = i - } - - err := boundContract.UnpackLogIntoMap(values, event.Name, log) - if err != nil { - return nil, err - } - - strValues := make(map[string]string, len(values)) - seenAddrs := make([]interface{}, 0, len(values)) - seenHashes := make([]interface{}, 0, len(values)) - for fieldName, input := range values { - // Postgres cannot handle custom types, resolve everything to strings 
- switch input.(type) { - case *big.Int: - b := input.(*big.Int) - strValues[fieldName] = b.String() - case common.Address: - a := input.(common.Address) - strValues[fieldName] = a.String() - seenAddrs = append(seenAddrs, a) - case common.Hash: - h := input.(common.Hash) - strValues[fieldName] = h.String() - seenHashes = append(seenHashes, h) - case string: - strValues[fieldName] = input.(string) - case bool: - strValues[fieldName] = strconv.FormatBool(input.(bool)) - case []byte: - b := input.([]byte) - strValues[fieldName] = hexutil.Encode(b) - if len(b) == 32 { - seenHashes = append(seenHashes, common.HexToHash(strValues[fieldName])) - } - case uint8: - u := input.(uint8) - strValues[fieldName] = strconv.Itoa(int(u)) - case [32]uint8: - raw := input.([32]uint8) - converted := convertUintSliceToHash(raw) - strValues[fieldName] = converted.String() - seenHashes = append(seenHashes, converted) - default: - return nil, fmt.Errorf("error: unhandled abi type %T", input) - } - } - - // Only hold onto logs that pass our address filter, if any - if c.ContractInfo.PassesEventFilter(strValues) { - raw, err := json.Marshal(log) - if err != nil { - return nil, err - } - - returnLogs = append(returnLogs, types.Log{ - LogIndex: log.Index, - Values: strValues, - Raw: raw, - TransactionIndex: log.TxIndex, - ID: headerID, - }) - - // Cache emitted values if their caching is turned on - if c.ContractInfo.EmittedAddrs != nil { - c.ContractInfo.AddEmittedAddr(seenAddrs...) - } - if c.ContractInfo.EmittedHashes != nil { - c.ContractInfo.AddEmittedHash(seenHashes...) 
- } - } - } - - return returnLogs, nil -} - -// ConvertBatch converts the given watched event logs into types.Logs; returns a map of event names to a slice of their converted logs -func (c *Converter) ConvertBatch(logs []gethTypes.Log, events map[string]types.Event, headerID int64) (map[string][]types.Log, error) { - boundContract := bind.NewBoundContract(common.HexToAddress(c.ContractInfo.Address), c.ContractInfo.ParsedAbi, nil, nil, nil) - eventsToLogs := make(map[string][]types.Log) - for _, event := range events { - eventsToLogs[event.Name] = make([]types.Log, 0, len(logs)) - // Iterate through all event logs - for _, log := range logs { - // If the log is of this event type, process it as such - if event.Sig() == log.Topics[0] { - values := make(map[string]interface{}) - err := boundContract.UnpackLogIntoMap(values, event.Name, log) - if err != nil { - return nil, err - } - // Postgres cannot handle custom types, so we will resolve everything to strings - strValues := make(map[string]string, len(values)) - // Keep track of addresses and hashes emitted from events - seenAddrs := make([]interface{}, 0, len(values)) - seenHashes := make([]interface{}, 0, len(values)) - for fieldName, input := range values { - switch input.(type) { - case *big.Int: - b := input.(*big.Int) - strValues[fieldName] = b.String() - case common.Address: - a := input.(common.Address) - strValues[fieldName] = a.String() - seenAddrs = append(seenAddrs, a) - case common.Hash: - h := input.(common.Hash) - strValues[fieldName] = h.String() - seenHashes = append(seenHashes, h) - case string: - strValues[fieldName] = input.(string) - case bool: - strValues[fieldName] = strconv.FormatBool(input.(bool)) - case []byte: - b := input.([]byte) - strValues[fieldName] = hexutil.Encode(b) - if len(b) == 32 { // collect byte arrays of size 32 as hashes - seenHashes = append(seenHashes, common.BytesToHash(b)) - } - case uint8: - u := input.(uint8) - strValues[fieldName] = strconv.Itoa(int(u)) - case 
[32]uint8: - raw := input.([32]uint8) - converted := convertUintSliceToHash(raw) - strValues[fieldName] = converted.String() - seenHashes = append(seenHashes, converted) - default: - return nil, fmt.Errorf("error: unhandled abi type %T", input) - } - } - - // Only hold onto logs that pass our argument filter, if any - if c.ContractInfo.PassesEventFilter(strValues) { - raw, err := json.Marshal(log) - if err != nil { - return nil, err - } - - eventsToLogs[event.Name] = append(eventsToLogs[event.Name], types.Log{ - LogIndex: log.Index, - Values: strValues, - Raw: raw, - TransactionIndex: log.TxIndex, - ID: headerID, - }) - - // Cache emitted values that pass the argument filter if their caching is turned on - if c.ContractInfo.EmittedAddrs != nil { - c.ContractInfo.AddEmittedAddr(seenAddrs...) - } - if c.ContractInfo.EmittedHashes != nil { - c.ContractInfo.AddEmittedHash(seenHashes...) - } - } - } - } - } - - return eventsToLogs, nil -} - -func convertUintSliceToHash(raw [32]uint8) common.Hash { - var asBytes []byte - for _, u := range raw { - asBytes = append(asBytes, u) - } - return common.BytesToHash(asBytes) -} diff --git a/pkg/eth/contract_watcher/header/converter/converter_suite_test.go b/pkg/eth/contract_watcher/header/converter/converter_suite_test.go deleted file mode 100644 index d1c0ab88..00000000 --- a/pkg/eth/contract_watcher/header/converter/converter_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package converter_test - -import ( - "io/ioutil" - "log" - "testing" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" -) - -func TestConverter(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Header Sync Converter Suite Test") -} - -var _ = BeforeSuite(func() { - log.SetOutput(ioutil.Discard) -}) diff --git a/pkg/eth/contract_watcher/header/converter/converter_test.go b/pkg/eth/contract_watcher/header/converter/converter_test.go deleted file mode 100644 index 154f5cb8..00000000 --- a/pkg/eth/contract_watcher/header/converter/converter_test.go +++ /dev/null @@ -1,186 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package converter_test - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/header/converter" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks" -) - -var _ = Describe("Converter", func() { - var con *contract.Contract - var tusdWantedEvents = []string{"Transfer", "Mint"} - var ensWantedEvents = []string{"NewOwner"} - var marketPlaceWantedEvents = []string{"OrderCreated"} - var molochWantedEvents = []string{"SubmitVote"} - var err error - - Describe("Update", func() { - It("Updates contract info held by the converter", func() { - con = test_helpers.SetupTusdContract(tusdWantedEvents, []string{}) - c := converter.Converter{} - c.Update(con) - Expect(c.ContractInfo).To(Equal(con)) - - info := test_helpers.SetupTusdContract([]string{}, []string{}) - c.Update(info) - Expect(c.ContractInfo).To(Equal(info)) - }) - }) - - Describe("Convert", func() { - It("Converts a watched event log to mapping of event input names to values", func() { - con = test_helpers.SetupTusdContract(tusdWantedEvents, []string{}) - _, ok := con.Events["Approval"] - Expect(ok).To(Equal(false)) - - event, ok := con.Events["Transfer"] - Expect(ok).To(Equal(true)) - - c := converter.Converter{} - c.Update(con) - logs, err := c.Convert([]types.Log{mocks.MockTransferLog1, mocks.MockTransferLog2}, event, 232) - Expect(err).ToNot(HaveOccurred()) - Expect(len(logs)).To(Equal(2)) - - sender1 := common.HexToAddress("0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391") - sender2 := common.HexToAddress("0x000000000000000000000000000000000000000000000000000000000000af21") - value := helpers.BigFromString("1097077688018008265106216665536940668749033598146") - - 
Expect(logs[0].Values["to"]).To(Equal(sender1.String())) - Expect(logs[0].Values["from"]).To(Equal(sender2.String())) - Expect(logs[0].Values["value"]).To(Equal(value.String())) - Expect(logs[0].ID).To(Equal(int64(232))) - Expect(logs[1].Values["to"]).To(Equal(sender2.String())) - Expect(logs[1].Values["from"]).To(Equal(sender1.String())) - Expect(logs[1].Values["value"]).To(Equal(value.String())) - Expect(logs[1].ID).To(Equal(int64(232))) - }) - - It("Keeps track of addresses it sees if they will be used for method polling", func() { - con = test_helpers.SetupTusdContract(tusdWantedEvents, []string{"balanceOf"}) - event, ok := con.Events["Transfer"] - Expect(ok).To(Equal(true)) - - c := converter.Converter{} - c.Update(con) - _, err := c.Convert([]types.Log{mocks.MockTransferLog1, mocks.MockTransferLog2}, event, 232) - Expect(err).ToNot(HaveOccurred()) - - b, ok := con.EmittedAddrs[common.HexToAddress("0x000000000000000000000000000000000000Af21")] - Expect(ok).To(Equal(true)) - Expect(b).To(Equal(true)) - - b, ok = con.EmittedAddrs[common.HexToAddress("0x09BbBBE21a5975cAc061D82f7b843bCE061BA391")] - Expect(ok).To(Equal(true)) - Expect(b).To(Equal(true)) - - _, ok = con.EmittedAddrs[common.HexToAddress("0x")] - Expect(ok).To(Equal(false)) - - _, ok = con.EmittedAddrs[""] - Expect(ok).To(Equal(false)) - - _, ok = con.EmittedAddrs[common.HexToAddress("0x09THISE21a5IS5cFAKE1D82fAND43bCE06MADEUP")] - Expect(ok).To(Equal(false)) - - _, ok = con.EmittedHashes[common.HexToHash("0x000000000000000000000000c02aaa39b223helloa0e5c4f27ead9083c752553")] - Expect(ok).To(Equal(false)) - }) - - It("Keeps track of hashes it sees if they will be used for method polling", func() { - con = test_helpers.SetupENSContract(ensWantedEvents, []string{"owner"}) - event, ok := con.Events["NewOwner"] - Expect(ok).To(Equal(true)) - - c := converter.Converter{} - c.Update(con) - _, err := c.Convert([]types.Log{mocks.MockNewOwnerLog1, mocks.MockNewOwnerLog2}, event, 232) - 
Expect(err).ToNot(HaveOccurred()) - Expect(len(con.EmittedHashes)).To(Equal(3)) - - b, ok := con.EmittedHashes[common.HexToHash("0x000000000000000000000000c02aaa39b223helloa0e5c4f27ead9083c752553")] - Expect(ok).To(Equal(true)) - Expect(b).To(Equal(true)) - - b, ok = con.EmittedHashes[common.HexToHash("0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391")] - Expect(ok).To(Equal(true)) - Expect(b).To(Equal(true)) - - b, ok = con.EmittedHashes[common.HexToHash("0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba400")] - Expect(ok).To(Equal(true)) - Expect(b).To(Equal(true)) - - _, ok = con.EmittedHashes[common.HexToHash("0x9dd48thiscc444isc242510c0made03upa5975cac061dhashb843bce061ba400")] - Expect(ok).To(Equal(false)) - - _, ok = con.EmittedHashes[common.HexToAddress("0x")] - Expect(ok).To(Equal(false)) - - _, ok = con.EmittedHashes[""] - Expect(ok).To(Equal(false)) - - // Does not keep track of emitted addresses if the methods provided will not use them - _, ok = con.EmittedAddrs[common.HexToAddress("0x000000000000000000000000000000000000Af21")] - Expect(ok).To(Equal(false)) - }) - - It("correctly parses bytes32", func() { - con = test_helpers.SetupMarketPlaceContract(marketPlaceWantedEvents, []string{}) - event, ok := con.Events["OrderCreated"] - Expect(ok).To(BeTrue()) - - c := converter.Converter{} - c.Update(con) - result, err := c.Convert([]types.Log{mocks.MockOrderCreatedLog}, event, 232) - Expect(err).NotTo(HaveOccurred()) - - Expect(len(result)).To(Equal(1)) - Expect(result[0].Values["id"]).To(Equal("0x633f94affdcabe07c000231f85c752c97b9cc43966b432ec4d18641e6d178233")) - }) - - It("correctly parses uint8", func() { - con = test_helpers.SetupMolochContract(molochWantedEvents, []string{}) - event, ok := con.Events["SubmitVote"] - Expect(ok).To(BeTrue()) - - c := converter.Converter{} - c.Update(con) - result, err := c.Convert([]types.Log{mocks.MockSubmitVoteLog}, event, 232) - Expect(err).NotTo(HaveOccurred()) - - 
Expect(len(result)).To(Equal(1)) - Expect(result[0].Values["uintVote"]).To(Equal("1")) - }) - - It("Fails with an empty contract", func() { - event := con.Events["Transfer"] - c := converter.Converter{} - c.Update(&contract.Contract{}) - _, err = c.Convert([]types.Log{mocks.MockTransferLog1}, event, 232) - Expect(err).To(HaveOccurred()) - }) - }) -}) diff --git a/pkg/eth/contract_watcher/header/fetcher/fetcher.go b/pkg/eth/contract_watcher/header/fetcher/fetcher.go deleted file mode 100644 index 2c250def..00000000 --- a/pkg/eth/contract_watcher/header/fetcher/fetcher.go +++ /dev/null @@ -1,71 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package fetcher - -import ( - "github.com/ethereum/go-ethereum" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -// Fetcher is the fetching interface -type Fetcher interface { - FetchLogs(contractAddresses []string, topics []common.Hash, missingHeader core.Header) ([]types.Log, error) -} - -type fetcher struct { - blockChain core.BlockChain -} - -// NewFetcher returns a new Fetcher -func NewFetcher(blockchain core.BlockChain) Fetcher { - return &fetcher{ - blockChain: blockchain, - } -} - -// FetchLogs checks all topic0s, on all addresses, fetching matching logs for the given header -func (fetcher *fetcher) FetchLogs(contractAddresses []string, topic0s []common.Hash, header core.Header) ([]types.Log, error) { - addresses := hexStringsToAddresses(contractAddresses) - blockHash := common.HexToHash(header.Hash) - query := ethereum.FilterQuery{ - BlockHash: &blockHash, - Addresses: addresses, - // Search for _any_ of the topics in topic0 position; see docs on `FilterQuery` - Topics: [][]common.Hash{topic0s}, - } - - logs, err := fetcher.blockChain.GetEthLogsWithCustomQuery(query) - if err != nil { - // TODO review aggregate fetching error handling - return []types.Log{}, err - } - - return logs, nil -} - -func hexStringsToAddresses(hexStrings []string) []common.Address { - var addresses []common.Address - for _, hexString := range hexStrings { - address := common.HexToAddress(hexString) - addresses = append(addresses, address) - } - - return addresses -} diff --git a/pkg/eth/contract_watcher/header/fetcher/fetcher_suite_test.go b/pkg/eth/contract_watcher/header/fetcher/fetcher_suite_test.go deleted file mode 100644 index 031d15fc..00000000 --- a/pkg/eth/contract_watcher/header/fetcher/fetcher_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under 
the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fetcher_test - -import ( - "io/ioutil" - "log" - "testing" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" -) - -func TestFetcher(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Fetcher Suite Test") -} - -var _ = BeforeSuite(func() { - log.SetOutput(ioutil.Discard) -}) diff --git a/pkg/eth/contract_watcher/header/fetcher/fetcher_test.go b/pkg/eth/contract_watcher/header/fetcher/fetcher_test.go deleted file mode 100644 index 1ffcdf2c..00000000 --- a/pkg/eth/contract_watcher/header/fetcher/fetcher_test.go +++ /dev/null @@ -1,66 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fetcher_test - -import ( - "github.com/ethereum/go-ethereum" - "github.com/ethereum/go-ethereum/common" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/header/fetcher" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" -) - -var _ = Describe("Fetcher", func() { - Describe("FetchLogs", func() { - It("fetches logs based on the given query", func() { - blockChain := fakes.NewMockBlockChain() - fetcher := fetcher.NewFetcher(blockChain) - header := fakes.FakeHeader - - addresses := []string{"0xfakeAddress", "0xanotherFakeAddress"} - topicZeros := [][]common.Hash{{common.BytesToHash([]byte{1, 2, 3, 4, 5})}} - - _, err := fetcher.FetchLogs(addresses, []common.Hash{common.BytesToHash([]byte{1, 2, 3, 4, 5})}, header) - - address1 := common.HexToAddress("0xfakeAddress") - address2 := common.HexToAddress("0xanotherFakeAddress") - Expect(err).NotTo(HaveOccurred()) - - blockHash := common.HexToHash(header.Hash) - expectedQuery := ethereum.FilterQuery{ - BlockHash: &blockHash, - Addresses: []common.Address{address1, address2}, - Topics: topicZeros, - } - blockChain.AssertGetEthLogsWithCustomQueryCalledWith(expectedQuery) - }) - - It("returns an error if fetching the logs fails", func() { - blockChain := fakes.NewMockBlockChain() - blockChain.SetGetEthLogsWithCustomQueryErr(fakes.FakeError) - fetcher := fetcher.NewFetcher(blockChain) - - _, err := fetcher.FetchLogs([]string{}, []common.Hash{}, core.Header{}) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - }) -}) diff --git a/pkg/eth/contract_watcher/header/repository/header_repository.go b/pkg/eth/contract_watcher/header/repository/header_repository.go deleted file mode 100644 index 1be851de..00000000 --- a/pkg/eth/contract_watcher/header/repository/header_repository.go +++ /dev/null @@ -1,271 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// 
the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package repository - -import ( - "fmt" - - "github.com/hashicorp/golang-lru" - "github.com/sirupsen/logrus" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -const columnCacheSize = 1000 - -// HeaderRepository interfaces with the header and checked_headers tables -type HeaderRepository interface { - AddCheckColumn(id string) error - AddCheckColumns(ids []string) error - MarkHeaderChecked(headerID int64, eventID string) error - MarkHeaderCheckedForAll(headerID int64, ids []string) error - MarkHeadersCheckedForAll(headers []core.Header, ids []string) error - MissingHeaders(startingBlockNumber int64, endingBlockNumber int64, eventID string) ([]core.Header, error) - MissingMethodsCheckedEventsIntersection(startingBlockNumber, endingBlockNumber int64, methodIds, eventIds []string) ([]core.Header, error) - MissingHeadersForAll(startingBlockNumber, endingBlockNumber int64, ids []string) ([]core.Header, error) - CheckCache(key string) (interface{}, bool) -} - -type headerRepository struct { - db *postgres.DB - columns *lru.Cache // Cache created columns to minimize db connections -} - -// NewHeaderRepository returns a new HeaderRepository -func NewHeaderRepository(db *postgres.DB) HeaderRepository { - ccs, _ := lru.New(columnCacheSize) - return &headerRepository{ - db: db, - columns: ccs, - } -} - -// AddCheckColumn adds a checked_header column for the provided column id -func (r *headerRepository) AddCheckColumn(id string) error { - // 
Check cache to see if column already exists before querying pg - _, ok := r.columns.Get(id) - if ok { - return nil - } - - pgStr := "ALTER TABLE public.checked_headers ADD COLUMN IF NOT EXISTS " - pgStr = pgStr + id + " INTEGER NOT NULL DEFAULT 0" - _, err := r.db.Exec(pgStr) - if err != nil { - return err - } - - // Add column name to cache - r.columns.Add(id, true) - - return nil -} - -// AddCheckColumns adds a checked_header column for all of the provided column ids -func (r *headerRepository) AddCheckColumns(ids []string) error { - var err error - baseQuery := "ALTER TABLE public.checked_headers" - input := make([]string, 0, len(ids)) - for _, id := range ids { - _, ok := r.columns.Get(id) - if !ok { - baseQuery += " ADD COLUMN IF NOT EXISTS " + id + " INTEGER NOT NULL DEFAULT 0," - input = append(input, id) - } - } - if len(input) > 0 { - _, err = r.db.Exec(baseQuery[:len(baseQuery)-1]) - if err == nil { - for _, id := range input { - r.columns.Add(id, true) - } - } - } - - return err -} - -// MarkHeaderChecked marks the header checked for the provided column id -func (r *headerRepository) MarkHeaderChecked(headerID int64, id string) error { - _, err := r.db.Exec(`INSERT INTO public.checked_headers (header_id, `+id+`) - VALUES ($1, $2) - ON CONFLICT (header_id) DO - UPDATE SET `+id+` = checked_headers.`+id+` + 1`, headerID, 1) - return err -} - -// MarkHeaderCheckedForAll marks the header checked for all of the provided column ids -func (r *headerRepository) MarkHeaderCheckedForAll(headerID int64, ids []string) error { - pgStr := "INSERT INTO public.checked_headers (header_id, " - for _, id := range ids { - pgStr += id + ", " - } - pgStr = pgStr[:len(pgStr)-2] + ") VALUES ($1, " - for i := 0; i < len(ids); i++ { - pgStr += "1, " - } - pgStr = pgStr[:len(pgStr)-2] + ") ON CONFLICT (header_id) DO UPDATE SET " - for _, id := range ids { - pgStr += id + `= checked_headers.` + id + ` + 1, ` - } - pgStr = pgStr[:len(pgStr)-2] - _, err := r.db.Exec(pgStr, headerID) - 
return err -} - -// MarkHeadersCheckedForAll marks all of the provided headers checked for each of the provided column ids -func (r *headerRepository) MarkHeadersCheckedForAll(headers []core.Header, ids []string) error { - tx, err := r.db.Beginx() - if err != nil { - return err - } - for _, header := range headers { - pgStr := "INSERT INTO public.checked_headers (header_id, " - for _, id := range ids { - pgStr += id + ", " - } - pgStr = pgStr[:len(pgStr)-2] + ") VALUES ($1, " - for i := 0; i < len(ids); i++ { - pgStr += "1, " - } - pgStr = pgStr[:len(pgStr)-2] + ") ON CONFLICT (header_id) DO UPDATE SET " - for _, id := range ids { - pgStr += fmt.Sprintf("%s = checked_headers.%s + 1, ", id, id) - } - pgStr = pgStr[:len(pgStr)-2] - _, err = tx.Exec(pgStr, header.ID) - if err != nil { - rollbackErr := tx.Rollback() - if rollbackErr != nil { - logrus.Warnf("error rolling back transaction: %s", rollbackErr.Error()) - } - return err - } - } - err = tx.Commit() - return err -} - -// MissingHeaders returns missing headers for the provided checked_headers column id -func (r *headerRepository) MissingHeaders(startingBlockNumber, endingBlockNumber int64, id string) ([]core.Header, error) { - var result []core.Header - var query string - var err error - if endingBlockNumber == -1 { - query = `SELECT headers.id, headers.block_number, headers.hash FROM headers - LEFT JOIN checked_headers on headers.id = header_id - WHERE (header_id ISNULL OR checked_headers.` + id + `=0) - AND headers.block_number >= $1 - AND headers.eth_node_fingerprint = $2 - ORDER BY headers.block_number` - err = r.db.Select(&result, query, startingBlockNumber, r.db.Node.ID) - } else { - query = `SELECT headers.id, headers.block_number, headers.hash FROM headers - LEFT JOIN checked_headers on headers.id = header_id - WHERE (header_id ISNULL OR checked_headers.` + id + `=0) - AND headers.block_number >= $1 - AND headers.block_number <= $2 - AND headers.eth_node_fingerprint = $3 - ORDER BY headers.block_number` 
- err = r.db.Select(&result, query, startingBlockNumber, endingBlockNumber, r.db.Node.ID) - } - return continuousHeaders(result), err -} - -// MissingHeadersForAll returns missing headers for all of the provided checked_headers column ids -func (r *headerRepository) MissingHeadersForAll(startingBlockNumber, endingBlockNumber int64, ids []string) ([]core.Header, error) { - var result []core.Header - var query string - var err error - baseQuery := `SELECT headers.id, headers.block_number, headers.hash FROM headers - LEFT JOIN checked_headers on headers.id = header_id - WHERE (header_id ISNULL` - for _, id := range ids { - baseQuery += ` OR checked_headers.` + id + `= 0` - } - if endingBlockNumber == -1 { - endStr := `) AND headers.block_number >= $1 - AND headers.eth_node_fingerprint = $2 - ORDER BY headers.block_number` - query = baseQuery + endStr - err = r.db.Select(&result, query, startingBlockNumber, r.db.Node.ID) - } else { - endStr := `) AND headers.block_number >= $1 - AND headers.block_number <= $2 - AND headers.eth_node_fingerprint = $3 - ORDER BY headers.block_number` - query = baseQuery + endStr - err = r.db.Select(&result, query, startingBlockNumber, endingBlockNumber, r.db.Node.ID) - } - return continuousHeaders(result), err -} - -// MissingMethodsCheckedEventsIntersection returns headers that have been checked for all of the provided event ids but not for the provided method ids -func (r *headerRepository) MissingMethodsCheckedEventsIntersection(startingBlockNumber, endingBlockNumber int64, methodIds, eventIds []string) ([]core.Header, error) { - var result []core.Header - var query string - var err error - baseQuery := `SELECT headers.id, headers.block_number, headers.hash FROM headers - LEFT JOIN checked_headers on headers.id = header_id - WHERE (header_id IS NOT NULL` - for _, id := range eventIds { - baseQuery += ` AND ` + id + `!=0` - } - baseQuery += `) AND (` - for _, id := range methodIds { - baseQuery += id + ` =0 AND ` - } - baseQuery = 
baseQuery[:len(baseQuery)-5] + `) ` - if endingBlockNumber == -1 { - endStr := `AND headers.block_number >= $1 - AND headers.eth_node_fingerprint = $2 - ORDER BY headers.block_number` - query = baseQuery + endStr - err = r.db.Select(&result, query, startingBlockNumber, r.db.Node.ID) - } else { - endStr := `AND headers.block_number >= $1 - AND headers.block_number <= $2 - AND headers.eth_node_fingerprint = $3 - ORDER BY headers.block_number` - query = baseQuery + endStr - err = r.db.Select(&result, query, startingBlockNumber, endingBlockNumber, r.db.Node.ID) - } - return continuousHeaders(result), err -} - -// Returns a continuous set of headers -func continuousHeaders(headers []core.Header) []core.Header { - if len(headers) < 1 { - logrus.Trace("no headers to arrange continuously") - return headers - } - previousHeader := headers[0].BlockNumber - for i := 1; i < len(headers); i++ { - previousHeader++ - if headers[i].BlockNumber != previousHeader { - return headers[:i] - } - } - - return headers -} - -// CheckCache checks the repositories column id cache for a value -func (r *headerRepository) CheckCache(key string) (interface{}, bool) { - return r.columns.Get(key) -} diff --git a/pkg/eth/contract_watcher/header/repository/header_repository_test.go b/pkg/eth/contract_watcher/header/repository/header_repository_test.go deleted file mode 100644 index de517ef3..00000000 --- a/pkg/eth/contract_watcher/header/repository/header_repository_test.go +++ /dev/null @@ -1,371 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package repository_test - -import ( - "fmt" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/header/repository" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -var _ = Describe("Repository", func() { - var db *postgres.DB - var contractHeaderRepo repository.HeaderRepository // contract_watcher headerSync header repository - var coreHeaderRepo repositories.HeaderRepository // pkg/datastore header repository - var eventIDs = []string{ - "eventName_contractAddr", - "eventName_contractAddr2", - "eventName_contractAddr3", - } - var methodIDs = []string{ - "methodName_contractAddr", - "methodName_contractAddr2", - "methodName_contractAddr3", - } - - BeforeEach(func() { - db, _ = test_helpers.SetupDBandBC() - contractHeaderRepo = repository.NewHeaderRepository(db) - coreHeaderRepo = repositories.NewHeaderRepository(db) - }) - - AfterEach(func() { - test_helpers.TearDown(db) - }) - - Describe("AddCheckColumn", func() { - It("Creates a column for the given eventID to mark if the header has been checked for that event", func() { - query := fmt.Sprintf("SELECT %s FROM checked_headers", eventIDs[0]) - _, err := db.Exec(query) - Expect(err).To(HaveOccurred()) - - err = contractHeaderRepo.AddCheckColumn(eventIDs[0]) 
- Expect(err).ToNot(HaveOccurred()) - - _, err = db.Exec(query) - Expect(err).ToNot(HaveOccurred()) - }) - - It("Caches column it creates so that it does not need to repeatedly query the database to check for it's existence", func() { - _, ok := contractHeaderRepo.CheckCache(eventIDs[0]) - Expect(ok).To(Equal(false)) - - err := contractHeaderRepo.AddCheckColumn(eventIDs[0]) - Expect(err).ToNot(HaveOccurred()) - - v, ok := contractHeaderRepo.CheckCache(eventIDs[0]) - Expect(ok).To(Equal(true)) - Expect(v).To(Equal(true)) - }) - }) - - Describe("AddCheckColumns", func() { - It("Creates a column for the given eventIDs to mark if the header has been checked for those events", func() { - for _, id := range eventIDs { - _, err := db.Exec(fmt.Sprintf("SELECT %s FROM checked_headers", id)) - Expect(err).To(HaveOccurred()) - } - - err := contractHeaderRepo.AddCheckColumns(eventIDs) - Expect(err).ToNot(HaveOccurred()) - - for _, id := range eventIDs { - _, err := db.Exec(fmt.Sprintf("SELECT %s FROM checked_headers", id)) - Expect(err).ToNot(HaveOccurred()) - } - }) - - It("Caches columns it creates so that it does not need to repeatedly query the database to check for it's existence", func() { - for _, id := range eventIDs { - _, ok := contractHeaderRepo.CheckCache(id) - Expect(ok).To(Equal(false)) - } - - err := contractHeaderRepo.AddCheckColumns(eventIDs) - Expect(err).ToNot(HaveOccurred()) - - for _, id := range eventIDs { - v, ok := contractHeaderRepo.CheckCache(id) - Expect(ok).To(Equal(true)) - Expect(v).To(Equal(true)) - } - }) - }) - - Describe("MissingHeaders", func() { - It("Returns all unchecked headers for the given eventID", func() { - addHeaders(coreHeaderRepo) - err := contractHeaderRepo.AddCheckColumn(eventIDs[0]) - Expect(err).ToNot(HaveOccurred()) - - missingHeaders, err := contractHeaderRepo.MissingHeaders(mocks.MockHeader1.BlockNumber, mocks.MockHeader4.BlockNumber, eventIDs[0]) - Expect(err).ToNot(HaveOccurred()) - 
Expect(len(missingHeaders)).To(Equal(3)) - }) - - It("Returns unchecked headers in ascending order", func() { - addHeaders(coreHeaderRepo) - err := contractHeaderRepo.AddCheckColumn(eventIDs[0]) - Expect(err).ToNot(HaveOccurred()) - - missingHeaders, err := contractHeaderRepo.MissingHeaders(mocks.MockHeader1.BlockNumber, mocks.MockHeader4.BlockNumber, eventIDs[0]) - Expect(err).ToNot(HaveOccurred()) - Expect(len(missingHeaders)).To(Equal(3)) - - h1 := missingHeaders[0] - h2 := missingHeaders[1] - h3 := missingHeaders[2] - Expect(h1.BlockNumber).To(Equal(mocks.MockHeader1.BlockNumber)) - Expect(h2.BlockNumber).To(Equal(mocks.MockHeader2.BlockNumber)) - Expect(h3.BlockNumber).To(Equal(mocks.MockHeader3.BlockNumber)) - }) - - It("Returns only contiguous chunks of headers", func() { - addDiscontinuousHeaders(coreHeaderRepo) - err := contractHeaderRepo.AddCheckColumns(eventIDs) - Expect(err).ToNot(HaveOccurred()) - - missingHeaders, err := contractHeaderRepo.MissingHeaders(mocks.MockHeader1.BlockNumber, mocks.MockHeader4.BlockNumber, eventIDs[0]) - Expect(err).ToNot(HaveOccurred()) - Expect(len(missingHeaders)).To(Equal(2)) - Expect(missingHeaders[0].BlockNumber).To(Equal(mocks.MockHeader1.BlockNumber)) - Expect(missingHeaders[1].BlockNumber).To(Equal(mocks.MockHeader2.BlockNumber)) - }) - - It("Fails if eventID does not yet exist in check_headers table", func() { - addHeaders(coreHeaderRepo) - err := contractHeaderRepo.AddCheckColumn(eventIDs[0]) - Expect(err).ToNot(HaveOccurred()) - - _, err = contractHeaderRepo.MissingHeaders(mocks.MockHeader1.BlockNumber, mocks.MockHeader4.BlockNumber, "notEventId") - Expect(err).To(HaveOccurred()) - }) - }) - - Describe("MissingHeadersForAll", func() { // HERE - It("Returns all headers that have not been checked for all of the ids provided", func() { - addHeaders(coreHeaderRepo) - err := contractHeaderRepo.AddCheckColumns(eventIDs) - Expect(err).ToNot(HaveOccurred()) - - missingHeaders, err := 
contractHeaderRepo.MissingHeadersForAll(mocks.MockHeader1.BlockNumber, mocks.MockHeader4.BlockNumber, eventIDs) - Expect(err).ToNot(HaveOccurred()) - Expect(len(missingHeaders)).To(Equal(3)) - - err = contractHeaderRepo.MarkHeaderChecked(missingHeaders[0].ID, eventIDs[0]) - Expect(err).ToNot(HaveOccurred()) - - missingHeaders, err = contractHeaderRepo.MissingHeadersForAll(mocks.MockHeader1.BlockNumber, mocks.MockHeader4.BlockNumber, eventIDs) - Expect(err).ToNot(HaveOccurred()) - Expect(len(missingHeaders)).To(Equal(3)) - - err = contractHeaderRepo.MarkHeaderChecked(missingHeaders[0].ID, eventIDs[1]) - Expect(err).ToNot(HaveOccurred()) - err = contractHeaderRepo.MarkHeaderChecked(missingHeaders[0].ID, eventIDs[2]) - Expect(err).ToNot(HaveOccurred()) - - missingHeaders, err = contractHeaderRepo.MissingHeadersForAll(mocks.MockHeader2.BlockNumber, mocks.MockHeader4.BlockNumber, eventIDs) - Expect(err).ToNot(HaveOccurred()) - Expect(len(missingHeaders)).To(Equal(2)) - }) - - It("Returns only contiguous chunks of headers", func() { - addDiscontinuousHeaders(coreHeaderRepo) - err := contractHeaderRepo.AddCheckColumns(eventIDs) - Expect(err).ToNot(HaveOccurred()) - - missingHeaders, err := contractHeaderRepo.MissingHeadersForAll(mocks.MockHeader1.BlockNumber, mocks.MockHeader4.BlockNumber, eventIDs) - Expect(err).ToNot(HaveOccurred()) - Expect(len(missingHeaders)).To(Equal(2)) - Expect(missingHeaders[0].BlockNumber).To(Equal(mocks.MockHeader1.BlockNumber)) - Expect(missingHeaders[1].BlockNumber).To(Equal(mocks.MockHeader2.BlockNumber)) - }) - - It("returns headers after starting header if starting header not missing", func() { - addLaterHeaders(coreHeaderRepo) - err := contractHeaderRepo.AddCheckColumns(eventIDs) - Expect(err).NotTo(HaveOccurred()) - - missingHeaders, err := contractHeaderRepo.MissingHeadersForAll(mocks.MockHeader1.BlockNumber, -1, eventIDs) - Expect(err).ToNot(HaveOccurred()) - Expect(len(missingHeaders)).To(Equal(2)) - 
Expect(missingHeaders[0].BlockNumber).To(Equal(mocks.MockHeader3.BlockNumber)) - Expect(missingHeaders[1].BlockNumber).To(Equal(mocks.MockHeader4.BlockNumber)) - }) - - It("Fails if one of the eventIDs does not yet exist in check_headers table", func() { - addHeaders(coreHeaderRepo) - err := contractHeaderRepo.AddCheckColumns(eventIDs) - Expect(err).ToNot(HaveOccurred()) - badEventIDs := append(eventIDs, "notEventId") - - _, err = contractHeaderRepo.MissingHeadersForAll(mocks.MockHeader1.BlockNumber, mocks.MockHeader4.BlockNumber, badEventIDs) - Expect(err).To(HaveOccurred()) - }) - }) - - Describe("MarkHeaderChecked", func() { - It("Marks the header checked for the given eventID", func() { - addHeaders(coreHeaderRepo) - err := contractHeaderRepo.AddCheckColumn(eventIDs[0]) - Expect(err).ToNot(HaveOccurred()) - - missingHeaders, err := contractHeaderRepo.MissingHeaders(mocks.MockHeader1.BlockNumber, mocks.MockHeader4.BlockNumber, eventIDs[0]) - Expect(err).ToNot(HaveOccurred()) - Expect(len(missingHeaders)).To(Equal(3)) - - headerID := missingHeaders[0].ID - err = contractHeaderRepo.MarkHeaderChecked(headerID, eventIDs[0]) - Expect(err).ToNot(HaveOccurred()) - - missingHeaders, err = contractHeaderRepo.MissingHeaders(mocks.MockHeader2.BlockNumber, mocks.MockHeader4.BlockNumber, eventIDs[0]) - Expect(err).ToNot(HaveOccurred()) - Expect(len(missingHeaders)).To(Equal(2)) - }) - - It("Fails if eventID does not yet exist in check_headers table", func() { - addHeaders(coreHeaderRepo) - err := contractHeaderRepo.AddCheckColumn(eventIDs[0]) - Expect(err).ToNot(HaveOccurred()) - - missingHeaders, err := contractHeaderRepo.MissingHeaders(mocks.MockHeader1.BlockNumber, mocks.MockHeader4.BlockNumber, eventIDs[0]) - Expect(err).ToNot(HaveOccurred()) - Expect(len(missingHeaders)).To(Equal(3)) - - headerID := missingHeaders[0].ID - err = contractHeaderRepo.MarkHeaderChecked(headerID, "notEventId") - Expect(err).To(HaveOccurred()) - - missingHeaders, err = 
contractHeaderRepo.MissingHeaders(mocks.MockHeader1.BlockNumber, mocks.MockHeader4.BlockNumber, eventIDs[0]) - Expect(err).ToNot(HaveOccurred()) - Expect(len(missingHeaders)).To(Equal(3)) - }) - }) - - Describe("MarkHeaderCheckedForAll", func() { - It("Marks the header checked for all provided column ids", func() { - addHeaders(coreHeaderRepo) - err := contractHeaderRepo.AddCheckColumns(eventIDs) - Expect(err).ToNot(HaveOccurred()) - - missingHeaders, err := contractHeaderRepo.MissingHeadersForAll(mocks.MockHeader1.BlockNumber, mocks.MockHeader4.BlockNumber, eventIDs) - Expect(err).ToNot(HaveOccurred()) - Expect(len(missingHeaders)).To(Equal(3)) - - headerID := missingHeaders[0].ID - err = contractHeaderRepo.MarkHeaderCheckedForAll(headerID, eventIDs) - Expect(err).ToNot(HaveOccurred()) - - missingHeaders, err = contractHeaderRepo.MissingHeaders(mocks.MockHeader2.BlockNumber, mocks.MockHeader4.BlockNumber, eventIDs[0]) - Expect(err).ToNot(HaveOccurred()) - Expect(len(missingHeaders)).To(Equal(2)) - }) - }) - - Describe("MarkHeadersCheckedForAll", func() { - It("Marks the headers checked for all provided column ids", func() { - addHeaders(coreHeaderRepo) - methodIDs := []string{ - "methodName_contractAddr", - "methodName_contractAddr2", - "methodName_contractAddr3", - } - - var missingHeaders []core.Header - for _, id := range methodIDs { - err := contractHeaderRepo.AddCheckColumn(id) - Expect(err).ToNot(HaveOccurred()) - missingHeaders, err = contractHeaderRepo.MissingHeaders(mocks.MockHeader1.BlockNumber, mocks.MockHeader4.BlockNumber, id) - Expect(err).ToNot(HaveOccurred()) - Expect(len(missingHeaders)).To(Equal(3)) - } - - err := contractHeaderRepo.MarkHeadersCheckedForAll(missingHeaders, methodIDs) - Expect(err).ToNot(HaveOccurred()) - for _, id := range methodIDs { - missingHeaders, err = contractHeaderRepo.MissingHeaders(mocks.MockHeader1.BlockNumber, mocks.MockHeader4.BlockNumber, id) - Expect(err).ToNot(HaveOccurred()) - 
Expect(len(missingHeaders)).To(Equal(0)) - } - }) - }) - - Describe("MissingMethodsCheckedEventsIntersection", func() { - It("Returns headers that have been checked for all the provided events but have not been checked for all the provided methods", func() { - addHeaders(coreHeaderRepo) - for i, id := range eventIDs { - err := contractHeaderRepo.AddCheckColumn(id) - Expect(err).ToNot(HaveOccurred()) - err = contractHeaderRepo.AddCheckColumn(methodIDs[i]) - Expect(err).ToNot(HaveOccurred()) - } - - missingHeaders, err := contractHeaderRepo.MissingHeaders(mocks.MockHeader1.BlockNumber, mocks.MockHeader4.BlockNumber, eventIDs[0]) - Expect(err).ToNot(HaveOccurred()) - Expect(len(missingHeaders)).To(Equal(3)) - - headerID := missingHeaders[0].ID - headerID2 := missingHeaders[1].ID - for i, id := range eventIDs { - err = contractHeaderRepo.MarkHeaderChecked(headerID, id) - Expect(err).ToNot(HaveOccurred()) - err = contractHeaderRepo.MarkHeaderChecked(headerID2, id) - Expect(err).ToNot(HaveOccurred()) - err = contractHeaderRepo.MarkHeaderChecked(headerID, methodIDs[i]) - Expect(err).ToNot(HaveOccurred()) - } - - intersectionHeaders, err := contractHeaderRepo.MissingMethodsCheckedEventsIntersection(mocks.MockHeader1.BlockNumber, mocks.MockHeader4.BlockNumber, methodIDs, eventIDs) - Expect(err).ToNot(HaveOccurred()) - Expect(len(intersectionHeaders)).To(Equal(1)) - Expect(intersectionHeaders[0].ID).To(Equal(headerID2)) - }) - }) -}) - -func addHeaders(coreHeaderRepo repositories.HeaderRepository) { - _, err := coreHeaderRepo.CreateOrUpdateHeader(mocks.MockHeader1) - Expect(err).NotTo(HaveOccurred()) - _, err = coreHeaderRepo.CreateOrUpdateHeader(mocks.MockHeader2) - Expect(err).NotTo(HaveOccurred()) - _, err = coreHeaderRepo.CreateOrUpdateHeader(mocks.MockHeader3) - Expect(err).NotTo(HaveOccurred()) -} - -func addDiscontinuousHeaders(coreHeaderRepo repositories.HeaderRepository) { - _, err := coreHeaderRepo.CreateOrUpdateHeader(mocks.MockHeader1) - 
Expect(err).NotTo(HaveOccurred()) - _, err = coreHeaderRepo.CreateOrUpdateHeader(mocks.MockHeader2) - Expect(err).NotTo(HaveOccurred()) - _, err = coreHeaderRepo.CreateOrUpdateHeader(mocks.MockHeader4) - Expect(err).NotTo(HaveOccurred()) -} - -func addLaterHeaders(coreHeaderRepo repositories.HeaderRepository) { - _, err := coreHeaderRepo.CreateOrUpdateHeader(mocks.MockHeader3) - Expect(err).NotTo(HaveOccurred()) - _, err = coreHeaderRepo.CreateOrUpdateHeader(mocks.MockHeader4) - Expect(err).NotTo(HaveOccurred()) -} diff --git a/pkg/eth/contract_watcher/header/repository/repository_suite_test.go b/pkg/eth/contract_watcher/header/repository/repository_suite_test.go deleted file mode 100644 index 87726ebd..00000000 --- a/pkg/eth/contract_watcher/header/repository/repository_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package repository_test - -import ( - "io/ioutil" - "testing" - - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - "github.com/sirupsen/logrus" -) - -func TestRepository(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Header Sync Repository Suite Test") -} - -var _ = BeforeSuite(func() { - logrus.SetOutput(ioutil.Discard) -}) diff --git a/pkg/eth/contract_watcher/header/retriever/block_retriever.go b/pkg/eth/contract_watcher/header/retriever/block_retriever.go deleted file mode 100644 index 50d218bc..00000000 --- a/pkg/eth/contract_watcher/header/retriever/block_retriever.go +++ /dev/null @@ -1,61 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package retriever - -import ( - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -// BlockRetriever is used to retrieve the first block for a given contract and the most recent block -// It requires a vDB synced database with blocks, transactions, receipts, and logs -type BlockRetriever interface { - RetrieveFirstBlock() (int64, error) - RetrieveMostRecentBlock() (int64, error) -} - -type blockRetriever struct { - db *postgres.DB -} - -// NewBlockRetriever returns a new BlockRetriever -func NewBlockRetriever(db *postgres.DB) BlockRetriever { - return &blockRetriever{ - db: db, - } -} - -// RetrieveFirstBlock retrieves block number of earliest header in repo -func (r *blockRetriever) RetrieveFirstBlock() (int64, error) { - var firstBlock int - err := r.db.Get( - &firstBlock, - "SELECT block_number FROM headers ORDER BY block_number LIMIT 1", - ) - - return int64(firstBlock), err -} - -// RetrieveMostRecentBlock retrieves block number of latest header in repo -func (r *blockRetriever) RetrieveMostRecentBlock() (int64, error) { - var lastBlock int - err := r.db.Get( - &lastBlock, - "SELECT block_number FROM headers ORDER BY block_number DESC LIMIT 1", - ) - - return int64(lastBlock), err -} diff --git a/pkg/eth/contract_watcher/header/retriever/block_retriever_test.go b/pkg/eth/contract_watcher/header/retriever/block_retriever_test.go deleted file mode 100644 index a9891100..00000000 --- a/pkg/eth/contract_watcher/header/retriever/block_retriever_test.go +++ /dev/null @@ -1,85 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package retriever_test - -import ( - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/header/retriever" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -var _ = Describe("Block Retriever", func() { - var db *postgres.DB - var r retriever.BlockRetriever - var headerRepository repositories.HeaderRepository - - BeforeEach(func() { - db, _ = test_helpers.SetupDBandBC() - headerRepository = repositories.NewHeaderRepository(db) - r = retriever.NewBlockRetriever(db) - }) - - AfterEach(func() { - test_helpers.TearDown(db) - }) - - Describe("RetrieveFirstBlock", func() { - It("Retrieves block number of earliest header in the database", func() { - _, err := headerRepository.CreateOrUpdateHeader(mocks.MockHeader1) - Expect(err).ToNot(HaveOccurred()) - _, err = headerRepository.CreateOrUpdateHeader(mocks.MockHeader2) - Expect(err).ToNot(HaveOccurred()) - _, err = headerRepository.CreateOrUpdateHeader(mocks.MockHeader3) - Expect(err).ToNot(HaveOccurred()) - - i, err := r.RetrieveFirstBlock() - Expect(err).NotTo(HaveOccurred()) - Expect(i).To(Equal(int64(6194632))) - }) - - It("Fails if no headers can be found in the database", func() { - _, err := r.RetrieveFirstBlock() - Expect(err).To(HaveOccurred()) - }) - }) - - Describe("RetrieveMostRecentBlock", func() { - 
It("Retrieves the latest header's block number", func() { - _, err := headerRepository.CreateOrUpdateHeader(mocks.MockHeader1) - Expect(err).ToNot(HaveOccurred()) - _, err = headerRepository.CreateOrUpdateHeader(mocks.MockHeader2) - Expect(err).ToNot(HaveOccurred()) - _, err = headerRepository.CreateOrUpdateHeader(mocks.MockHeader3) - Expect(err).ToNot(HaveOccurred()) - - i, err := r.RetrieveMostRecentBlock() - Expect(err).ToNot(HaveOccurred()) - Expect(i).To(Equal(int64(6194634))) - }) - - It("Fails if no headers can be found in the database", func() { - i, err := r.RetrieveMostRecentBlock() - Expect(err).To(HaveOccurred()) - Expect(i).To(Equal(int64(0))) - }) - }) -}) diff --git a/pkg/eth/contract_watcher/header/retriever/retriever_suite_test.go b/pkg/eth/contract_watcher/header/retriever/retriever_suite_test.go deleted file mode 100644 index f6d4967e..00000000 --- a/pkg/eth/contract_watcher/header/retriever/retriever_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package retriever_test - -import ( - "io/ioutil" - "testing" - - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - "github.com/sirupsen/logrus" -) - -func TestRetriever(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Header Sync Block Number Retriever Suite Test") -} - -var _ = BeforeSuite(func() { - logrus.SetOutput(ioutil.Discard) -}) diff --git a/pkg/eth/contract_watcher/header/transformer/transformer.go b/pkg/eth/contract_watcher/header/transformer/transformer.go deleted file mode 100644 index 7b9c9d2a..00000000 --- a/pkg/eth/contract_watcher/header/transformer/transformer.go +++ /dev/null @@ -1,337 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package transformer - -import ( - "database/sql" - "errors" - "fmt" - "math" - "strings" - - "github.com/ethereum/go-ethereum/common" - gethTypes "github.com/ethereum/go-ethereum/core/types" - "github.com/sirupsen/logrus" - - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/header/converter" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/header/fetcher" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/header/repository" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/header/retriever" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/parser" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/poller" - srep "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/repository" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -// Transformer is the top level struct for transforming watched contract data -// Requires a header synced vDB (headers) and a running eth node (or infura) -type Transformer struct { - // Database interfaces - EventRepository srep.EventRepository // Holds transformed watched event log data - HeaderRepository repository.HeaderRepository // Interface for interaction with header repositories - - // Pre-processing interfaces - Parser parser.Parser // Parses events and methods out of contract abi fetched using contract address - Retriever retriever.BlockRetriever // Retrieves first block for contract - - // Processing interfaces - Fetcher fetcher.Fetcher // Fetches event logs, using header hashes - Converter converter.ConverterInterface // Converts watched event logs into custom log - Poller poller.Poller // Polls methods using arguments collected from events and persists them using a method datastore - - 
// Store contract configuration information - Config config.ContractConfig - - // Store contract info as mapping to contract address - Contracts map[string]*contract.Contract - - // Internally configured transformer variables - contractAddresses []string // Holds all contract addresses, for batch fetching of logs - sortedEventIds map[string][]string // Map to sort event column ids by contract, for post fetch processing and persisting of logs - sortedMethodIds map[string][]string // Map to sort method column ids by contract, for post fetch method polling - eventIds []string // Holds event column ids across all contract, for batch fetching of headers - eventFilters []common.Hash // Holds topic0 hashes across all contracts, for batch fetching of logs - Start int64 // Hold the lowest starting block and the highest ending block -} - -// Order-of-operations: -// 1. Create new transformer -// 2. Load contract addresses and their parameters -// 3. Init -// 4. Execute - -// NewTransformer takes in a contract config, blockchain, and database, and returns a new Transformer -func NewTransformer(con config.ContractConfig, bc core.BlockChain, db *postgres.DB) *Transformer { - - return &Transformer{ - Poller: poller.NewPoller(bc, db, types.HeaderSync), - Fetcher: fetcher.NewFetcher(bc), - Parser: parser.NewParser(con.Network), - HeaderRepository: repository.NewHeaderRepository(db), - Retriever: retriever.NewBlockRetriever(db), - Converter: &converter.Converter{}, - Contracts: map[string]*contract.Contract{}, - EventRepository: srep.NewEventRepository(db, types.HeaderSync), - Config: con, - } -} - -// Init initialized the Transformer -// Use after creating and setting transformer -// Loops over all of the addr => filter sets -// Uses parser to pull event info from abi -// Use this info to generate event filters -func (tr *Transformer) Init() error { - // Initialize internally configured transformer settings - tr.contractAddresses = make([]string, 0) // Holds all contract 
addresses, for batch fetching of logs - tr.sortedEventIds = make(map[string][]string) // Map to sort event column ids by contract, for post fetch processing and persisting of logs - tr.sortedMethodIds = make(map[string][]string) // Map to sort method column ids by contract, for post fetch method polling - tr.eventIds = make([]string, 0) // Holds event column ids across all contract, for batch fetching of headers - tr.eventFilters = make([]common.Hash, 0) // Holds topic0 hashes across all contracts, for batch fetching of logs - tr.Start = math.MaxInt64 - - // Iterate through all internal contract addresses - for contractAddr := range tr.Config.Addresses { - // Configure Abi - if tr.Config.Abis[contractAddr] == "" { - // If no abi is given in the config, this method will try fetching from internal look-up table and etherscan - parseErr := tr.Parser.Parse(contractAddr) - if parseErr != nil { - return fmt.Errorf("error parsing contract by address: %s", parseErr.Error()) - } - } else { - // If we have an abi from the config, load that into the parser - parseErr := tr.Parser.ParseAbiStr(tr.Config.Abis[contractAddr]) - if parseErr != nil { - return fmt.Errorf("error parsing contract abi: %s", parseErr.Error()) - } - } - - // Get first block and most recent block number in the header repo - firstBlock, retrieveErr := tr.Retriever.RetrieveFirstBlock() - if retrieveErr != nil { - if retrieveErr == sql.ErrNoRows { - logrus.Error(fmt.Errorf("error retrieving first block: %s", retrieveErr.Error())) - firstBlock = 0 - } else { - return fmt.Errorf("error retrieving first block: %s", retrieveErr.Error()) - } - } - - // Set to specified range if it falls within the bounds - if firstBlock < tr.Config.StartingBlocks[contractAddr] { - firstBlock = tr.Config.StartingBlocks[contractAddr] - } - - // Get contract name if it has one - var name = new(string) - pollingErr := tr.Poller.FetchContractData(tr.Parser.Abi(), contractAddr, "name", nil, name, -1) - if pollingErr != nil { - // can't 
return this error because "name" might not exist on the contract - logrus.Warnf("error fetching contract data: %s", pollingErr.Error()) - } - - // Remove any potential accidental duplicate inputs - eventArgs := map[string]bool{} - for _, arg := range tr.Config.EventArgs[contractAddr] { - eventArgs[arg] = true - } - methodArgs := map[string]bool{} - for _, arg := range tr.Config.MethodArgs[contractAddr] { - methodArgs[arg] = true - } - - // Aggregate info into contract object and store for execution - con := contract.Contract{ - Name: *name, - Network: tr.Config.Network, - Address: contractAddr, - Abi: tr.Parser.Abi(), - ParsedAbi: tr.Parser.ParsedAbi(), - StartingBlock: firstBlock, - Events: tr.Parser.GetEvents(tr.Config.Events[contractAddr]), - Methods: tr.Parser.GetSelectMethods(tr.Config.Methods[contractAddr]), - FilterArgs: eventArgs, - MethodArgs: methodArgs, - Piping: tr.Config.Piping[contractAddr], - }.Init() - tr.Contracts[contractAddr] = con - tr.contractAddresses = append(tr.contractAddresses, con.Address) - - // Create checked_headers columns for each event id and append to list of all event ids - tr.sortedEventIds[con.Address] = make([]string, 0, len(con.Events)) - for _, event := range con.Events { - eventID := strings.ToLower(event.Name + "_" + con.Address) - addColumnErr := tr.HeaderRepository.AddCheckColumn(eventID) - if addColumnErr != nil { - return fmt.Errorf("error adding check column: %s", addColumnErr.Error()) - } - // Keep track of this event id; sorted and unsorted - tr.sortedEventIds[con.Address] = append(tr.sortedEventIds[con.Address], eventID) - tr.eventIds = append(tr.eventIds, eventID) - // Append this event sig to the filters - tr.eventFilters = append(tr.eventFilters, event.Sig()) - } - - // Create checked_headers columns for each method id and append list of all method ids - tr.sortedMethodIds[con.Address] = make([]string, 0, len(con.Methods)) - for _, m := range con.Methods { - methodID := strings.ToLower(m.Name + "_" + con.Address) 
- addColumnErr := tr.HeaderRepository.AddCheckColumn(methodID) - if addColumnErr != nil { - return fmt.Errorf("error adding check column: %s", addColumnErr.Error()) - } - tr.sortedMethodIds[con.Address] = append(tr.sortedMethodIds[con.Address], methodID) - } - - // Update start to the lowest block - if con.StartingBlock < tr.Start { - tr.Start = con.StartingBlock - } - } - - return nil -} - -// Execute runs the transformation processes -func (tr *Transformer) Execute() error { - if len(tr.Contracts) == 0 { - return errors.New("error: transformer has no initialized contracts") - } - - // Find unchecked headers for all events across all contracts; these are returned in asc order - missingHeaders, missingHeadersErr := tr.HeaderRepository.MissingHeadersForAll(tr.Start, -1, tr.eventIds) - if missingHeadersErr != nil { - return fmt.Errorf("error getting missing headers: %s", missingHeadersErr.Error()) - } - - // Iterate over headers - for _, header := range missingHeaders { - // Set `start` to this header - // This way if we throw an error but don't bring the execution cycle down (how it is currently handled) - // we restart the cycle at this header - tr.Start = header.BlockNumber - // Map to sort batch fetched logs by which contract they belong to, for post fetch processing - sortedLogs := make(map[string][]gethTypes.Log) - // And fetch all event logs across contracts at this header - allLogs, fetchErr := tr.Fetcher.FetchLogs(tr.contractAddresses, tr.eventFilters, header) - if fetchErr != nil { - return fmt.Errorf("error fetching logs: %s", fetchErr.Error()) - } - - // If no logs are found mark the header checked for all of these eventIDs - // and continue to method polling and onto the next iteration - if len(allLogs) < 1 { - markCheckedErr := tr.HeaderRepository.MarkHeaderCheckedForAll(header.ID, tr.eventIds) - if markCheckedErr != nil { - return fmt.Errorf("error marking header checked: %s", markCheckedErr.Error()) - } - pollingErr := tr.methodPolling(header, 
tr.sortedMethodIds) - if pollingErr != nil { - return fmt.Errorf("error polling methods: %s", pollingErr.Error()) - } - tr.Start = header.BlockNumber + 1 // Empty header; setup to start at the next header - logrus.Tracef("no logs found for block %d, continuing", header.BlockNumber) - continue - } - - for _, log := range allLogs { - addr := strings.ToLower(log.Address.Hex()) - sortedLogs[addr] = append(sortedLogs[addr], log) - } - - // Process logs for each contract - for conAddr, logs := range sortedLogs { - if logs == nil { - logrus.Tracef("no logs found for contract %s at block %d, continuing", conAddr, header.BlockNumber) - continue - } - // Configure converter with this contract - con := tr.Contracts[conAddr] - tr.Converter.Update(con) - - // Convert logs into batches of log mappings (eventName => []types.Logs - convertedLogs, convertErr := tr.Converter.ConvertBatch(logs, con.Events, header.ID) - if convertErr != nil { - return fmt.Errorf("error converting logs: %s", convertErr.Error()) - } - // Cycle through each type of event log and persist them - for eventName, logs := range convertedLogs { - // If logs for this event are empty, mark them checked at this header and continue - if len(logs) < 1 { - logrus.Tracef("no logs found for event %s on contract %s at block %d, continuing", eventName, conAddr, header.BlockNumber) - continue - } - // If logs aren't empty, persist them - persistErr := tr.EventRepository.PersistLogs(logs, con.Events[eventName], con.Address, con.Name) - if persistErr != nil { - return fmt.Errorf("error persisting logs: %s", persistErr.Error()) - } - } - } - - markCheckedErr := tr.HeaderRepository.MarkHeaderCheckedForAll(header.ID, tr.eventIds) - if markCheckedErr != nil { - return fmt.Errorf("error marking header checked: %s", markCheckedErr.Error()) - } - - // Poll contracts at this block height - pollingErr := tr.methodPolling(header, tr.sortedMethodIds) - if pollingErr != nil { - return fmt.Errorf("error polling methods: %s", 
pollingErr.Error()) - } - // Success; setup to start at the next header - tr.Start = header.BlockNumber + 1 - } - - return nil -} - -// Used to poll contract methods at a given header -func (tr *Transformer) methodPolling(header core.Header, sortedMethodIds map[string][]string) error { - for _, con := range tr.Contracts { - // Skip method polling processes if no methods are specified - // Also don't try to poll methods below this contract's specified starting block - if len(con.Methods) == 0 || header.BlockNumber < con.StartingBlock { - logrus.Tracef("not polling contract: %s", con.Address) - continue - } - - // Poll all methods for this contract at this header - pollingErr := tr.Poller.PollContractAt(*con, header.BlockNumber) - if pollingErr != nil { - return fmt.Errorf("error polling contract %s: %s", con.Address, pollingErr.Error()) - } - - // Mark this header checked for the methods - markCheckedErr := tr.HeaderRepository.MarkHeaderCheckedForAll(header.ID, sortedMethodIds[con.Address]) - if markCheckedErr != nil { - return fmt.Errorf("error marking header checked: %s", markCheckedErr.Error()) - } - } - - return nil -} - -// GetConfig returns the transformers config; satisfies the transformer interface -func (tr *Transformer) GetConfig() config.ContractConfig { - return tr.Config -} diff --git a/pkg/eth/contract_watcher/header/transformer/transformer_suite_test.go b/pkg/eth/contract_watcher/header/transformer/transformer_suite_test.go deleted file mode 100644 index 56bcc01c..00000000 --- a/pkg/eth/contract_watcher/header/transformer/transformer_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package transformer_test - -import ( - "io/ioutil" - "testing" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/sirupsen/logrus" -) - -func TestTransformer(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Header Sync Transformer Suite Test") -} - -var _ = BeforeSuite(func() { - logrus.SetOutput(ioutil.Discard) -}) diff --git a/pkg/eth/contract_watcher/header/transformer/transformer_test.go b/pkg/eth/contract_watcher/header/transformer/transformer_test.go deleted file mode 100644 index 478176f1..00000000 --- a/pkg/eth/contract_watcher/header/transformer/transformer_test.go +++ /dev/null @@ -1,138 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package transformer_test - -import ( - "database/sql" - - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/header/retriever" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/header/transformer" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/parser" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/poller" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" -) - -var _ = Describe("Transformer", func() { - var fakeAddress = "0x1234567890abcdef" - Describe("Init", func() { - It("Initializes transformer's contract objects", func() { - blockRetriever := &fakes.MockHeaderSyncBlockRetriever{} - firstBlock := int64(1) - blockRetriever.FirstBlock = firstBlock - - parsr := &fakes.MockParser{} - fakeAbi := "fake_abi" - parsr.AbiToReturn = fakeAbi - - pollr := &fakes.MockPoller{} - fakeContractName := "fake_contract_name" - pollr.ContractName = fakeContractName - - t := getFakeTransformer(blockRetriever, parsr, pollr) - - err := t.Init() - - Expect(err).ToNot(HaveOccurred()) - - c, ok := t.Contracts[fakeAddress] - Expect(ok).To(Equal(true)) - - Expect(c.StartingBlock).To(Equal(firstBlock)) - Expect(c.Abi).To(Equal(fakeAbi)) - Expect(c.Name).To(Equal(fakeContractName)) - Expect(c.Address).To(Equal(fakeAddress)) - }) - - It("Fails to initialize if first block cannot be fetched from vDB headers table", func() { - blockRetriever := &fakes.MockHeaderSyncBlockRetriever{} - blockRetriever.FirstBlockErr = fakes.FakeError - t := getFakeTransformer(blockRetriever, &fakes.MockParser{}, &fakes.MockPoller{}) - - err := t.Init() - - Expect(err).To(HaveOccurred()) - Expect(err.Error()).To(ContainSubstring(fakes.FakeError.Error())) - }) - }) - - Describe("Execute", func() { - It("Executes contract transformations", func() { - blockRetriever := &fakes.MockHeaderSyncBlockRetriever{} - 
firstBlock := int64(1) - blockRetriever.FirstBlock = firstBlock - - parsr := &fakes.MockParser{} - fakeAbi := "fake_abi" - parsr.AbiToReturn = fakeAbi - - pollr := &fakes.MockPoller{} - fakeContractName := "fake_contract_name" - pollr.ContractName = fakeContractName - - t := getFakeTransformer(blockRetriever, parsr, pollr) - - err := t.Init() - - Expect(err).ToNot(HaveOccurred()) - - c, ok := t.Contracts[fakeAddress] - Expect(ok).To(Equal(true)) - - Expect(c.StartingBlock).To(Equal(firstBlock)) - Expect(c.Abi).To(Equal(fakeAbi)) - Expect(c.Name).To(Equal(fakeContractName)) - Expect(c.Address).To(Equal(fakeAddress)) - }) - - It("uses first block from config if vDB headers table has no rows", func() { - blockRetriever := &fakes.MockHeaderSyncBlockRetriever{} - blockRetriever.FirstBlockErr = sql.ErrNoRows - t := getFakeTransformer(blockRetriever, &fakes.MockParser{}, &fakes.MockPoller{}) - - err := t.Init() - - Expect(err).ToNot(HaveOccurred()) - }) - - It("returns error if fetching first block fails for other reason", func() { - blockRetriever := &fakes.MockHeaderSyncBlockRetriever{} - blockRetriever.FirstBlockErr = fakes.FakeError - t := getFakeTransformer(blockRetriever, &fakes.MockParser{}, &fakes.MockPoller{}) - - err := t.Init() - - Expect(err).To(HaveOccurred()) - Expect(err.Error()).To(ContainSubstring(fakes.FakeError.Error())) - }) - }) -}) - -func getFakeTransformer(blockRetriever retriever.BlockRetriever, parsr parser.Parser, pollr poller.Poller) transformer.Transformer { - return transformer.Transformer{ - Parser: parsr, - Retriever: blockRetriever, - Poller: pollr, - HeaderRepository: &fakes.MockHeaderSyncHeaderRepository{}, - Contracts: map[string]*contract.Contract{}, - Config: mocks.MockConfig, - } -} diff --git a/pkg/eth/contract_watcher/shared/constants/constants.go b/pkg/eth/contract_watcher/shared/constants/constants.go deleted file mode 100644 index 9e6855d9..00000000 --- a/pkg/eth/contract_watcher/shared/constants/constants.go +++ /dev/null @@ 
-1,114 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package constants - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/filters" -) - -// Event enums -type Event int - -const ( - TransferEvent Event = 0 - ApprovalEvent Event = 1 - BurnEvent Event = 2 - MintEvent Event = 3 - NewOwnerEvent Event = 4 -) - -// String returns the string name for an event -func (e Event) String() string { - strings := [...]string{ - "Transfer", - "Approval", - "Burn", - "Mint", - "NewOwner", - } - - if e < TransferEvent || e > NewOwnerEvent { - return "Unknown" - } - - return strings[e] -} - -// Signature returns the keccak256 signature for an event -func (e Event) Signature() string { - strings := [...]string{ - helpers.GenerateSignature("Transfer(address,address,uint256)"), - helpers.GenerateSignature("Approval(address,address,uint256)"), - helpers.GenerateSignature("Burn(address,uint256)"), - helpers.GenerateSignature("Mint(address,uint256)"), - helpers.GenerateSignature("NewOwner(bytes32,bytes32,address)"), - } - - if e < TransferEvent || e > NewOwnerEvent { - return "Unknown" - } - - return strings[e] -} - -// Contract Addresses -var DaiContractAddress = 
"0x89d24a6b4ccb1b6faa2625fe562bdd9a23260359" -var TusdContractAddress = "0x8dd5fbCe2F6a956C3022bA3663759011Dd51e73E" -var EnsContractAddress = "0x314159265dD8dbb310642f98f50C066173C1259b" -var MarketPlaceContractAddress = "0x8e5660b4Ab70168b5a6fEeA0e0315cb49c8Cd539" -var MolochContractAddress = "0x1fd169A4f5c59ACf79d0Fd5d91D1201EF1Bce9f1" -var PublicResolverAddress = "0x1da022710dF5002339274AaDEe8D58218e9D6AB5" - -// Contract Abis -var DaiAbiString = `[{"constant":true,"inputs":[],"name":"name","outputs":[{"name":"","type":"bytes32"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[],"name":"stop","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"guy","type":"address"},{"name":"wad","type":"uint256"}],"name":"approve","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"owner_","type":"address"}],"name":"setOwner","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"src","type":"address"},{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transferFrom","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"guy","type":"address"},{"name":"wad","type":"uint256"}],"name":"mint","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"wad","type":"uint256"}],"name":"burn","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant"
:false,"inputs":[{"name":"name_","type":"bytes32"}],"name":"setName","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[{"name":"src","type":"address"}],"name":"balanceOf","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"stopped","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"authority_","type":"address"}],"name":"setAuthority","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"owner","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"bytes32"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"guy","type":"address"},{"name":"wad","type":"uint256"}],"name":"burn","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"wad","type":"uint256"}],"name":"mint","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transfer","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"push","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"src","type":"address"},{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"move","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[],"name":"start","outputs":[],"payable":false,"stateMutability":"nonpayable","typ
e":"function"},{"constant":true,"inputs":[],"name":"authority","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"guy","type":"address"}],"name":"approve","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[{"name":"src","type":"address"},{"name":"guy","type":"address"}],"name":"allowance","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"src","type":"address"},{"name":"wad","type":"uint256"}],"name":"pull","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"inputs":[{"name":"symbol_","type":"bytes32"}],"payable":false,"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"name":"guy","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Mint","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"guy","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Burn","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"authority","type":"address"}],"name":"LogSetAuthority","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"owner","type":"address"}],"name":"LogSetOwner","type":"event"},{"anonymous":true,"inputs":[{"indexed":true,"name":"sig","type":"bytes4"},{"indexed":true,"name":"guy","type":"address"},{"indexed":true,"name":"foo","type":"bytes32"},{"indexed":true,"name":"bar","type":"bytes32"},{"indexed":false,"name":"wad","type":"uint256"},{"indexed":false,"name":"fax","type":"bytes"}],"name":"LogNote","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":true,"name":"guy","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":
true,"name":"src","type":"address"},{"indexed":true,"name":"dst","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Transfer","type":"event"}]` - -var TusdAbiString = `[{"constant":true,"inputs":[],"name":"burnMin","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"name","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"spender","type":"address"},{"name":"value","type":"uint256"}],"name":"approve","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[{"name":"owner","type":"address"},{"name":"spender","type":"address"}],"name":"delegateAllowance","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"burnFeeFlat","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"_canReceiveMintWhiteList","type":"address"},{"name":"_canBurnWhiteList","type":"address"},{"name":"_blackList","type":"address"},{"name":"_noFeesList","type":"address"}],"name":"setLists","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"token","type":"address"}],"name":"reclaimToken","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"newContract","type":"address"}],"name":"delegateToNewContract","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"_transferFeeNumerator","type":"uint80"},{"name":"_transferFeeDenominator","type":"uin
t80"},{"name":"_mintFeeNumerator","type":"uint80"},{"name":"_mintFeeDenominator","type":"uint80"},{"name":"_mintFeeFlat","type":"uint256"},{"name":"_burnFeeNumerator","type":"uint80"},{"name":"_burnFeeDenominator","type":"uint80"},{"name":"_burnFeeFlat","type":"uint256"}],"name":"changeStakingFees","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"canReceiveMintWhiteList","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"from","type":"address"},{"name":"to","type":"address"},{"name":"value","type":"uint256"}],"name":"transferFrom","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"delegatedFrom","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"spender","type":"address"},{"name":"value","type":"uint256"},{"name":"origSender","type":"address"}],"name":"delegateApprove","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"contractAddr","type":"address"}],"name":"reclaimContract","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint8"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"allowances","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[],"name":"unpause","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"_to","type":"address"},{"name":"_amount","type":"uint256"}],"name":"mint","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"}
,{"constant":false,"inputs":[{"name":"_value","type":"uint256"}],"name":"burn","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[{"name":"who","type":"address"}],"name":"delegateBalanceOf","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"from","type":"address"},{"name":"to","type":"address"},{"name":"value","type":"uint256"},{"name":"origSender","type":"address"}],"name":"delegateTransferFrom","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[],"name":"claimOwnership","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"sheet","type":"address"}],"name":"setBalanceSheet","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"spender","type":"address"},{"name":"addedValue","type":"uint256"},{"name":"origSender","type":"address"}],"name":"delegateIncreaseApproval","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"burnFeeNumerator","outputs":[{"name":"","type":"uint80"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"canBurnWhiteList","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"burnMax","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"paused","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"mintFeeDenominator","outputs":[{"name":"","type":"uint80"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":tru
e,"inputs":[],"name":"staker","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"addr","type":"address"}],"name":"setDelegatedFrom","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"spender","type":"address"},{"name":"subtractedValue","type":"uint256"}],"name":"decreaseApproval","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"noFeesList","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"who","type":"address"}],"name":"balanceOf","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"newMin","type":"uint256"},{"name":"newMax","type":"uint256"}],"name":"changeBurnBounds","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"delegateTotalSupply","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"balances","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[],"name":"pause","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"_name","type":"string"},{"name":"_symbol","type":"string"}],"name":"changeName","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"mintFeeNumerator","outputs":[{"name":"","type":"uint80"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"owner","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"
constant":true,"inputs":[],"name":"transferFeeNumerator","outputs":[{"name":"","type":"uint80"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"spender","type":"address"},{"name":"subtractedValue","type":"uint256"},{"name":"origSender","type":"address"}],"name":"delegateDecreaseApproval","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"to","type":"address"},{"name":"value","type":"uint256"},{"name":"origSender","type":"address"}],"name":"delegateTransfer","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[],"name":"reclaimEther","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"to","type":"address"},{"name":"value","type":"uint256"}],"name":"transfer","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"newStaker","type":"address"}],"name":"changeStaker","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"account","type":"address"}],"name":"wipeBlacklistedAccount","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"from_","type":"address"},{"name":"value_","type":"uint256"},{"name":"data_","type":"bytes"}],"name":"tokenFallback","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"burnFeeDenominator","outputs":[{"name":"","type":"uint80"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"delegate","outputs":[{"name":"",
"type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"blackList","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"transferFeeDenominator","outputs":[{"name":"","type":"uint80"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"mintFeeFlat","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"spender","type":"address"},{"name":"addedValue","type":"uint256"}],"name":"increaseApproval","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[{"name":"_owner","type":"address"},{"name":"spender","type":"address"}],"name":"allowance","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"pendingOwner","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"sheet","type":"address"}],"name":"setAllowanceSheet","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"newOwner","type":"address"}],"name":"transferOwnership","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"inputs":[],"payable":false,"stateMutability":"nonpayable","type":"constructor"},{"payable":false,"stateMutability":"nonpayable","type":"fallback"},{"anonymous":false,"inputs":[{"indexed":false,"name":"newMin","type":"uint256"},{"indexed":false,"name":"newMax","type":"uint256"}],"name":"ChangeBurnBoundsEvent","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"to","type":"address"},{"indexed":false,"name":"amount","type":"uint256"}],"name":"Mint","type":"event"},{"anonymous":false,"inputs":[
{"indexed":true,"name":"account","type":"address"},{"indexed":false,"name":"balance","type":"uint256"}],"name":"WipedAccount","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"newContract","type":"address"}],"name":"DelegatedTo","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"burner","type":"address"},{"indexed":false,"name":"value","type":"uint256"}],"name":"Burn","type":"event"},{"anonymous":false,"inputs":[],"name":"Pause","type":"event"},{"anonymous":false,"inputs":[],"name":"Unpause","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"previousOwner","type":"address"},{"indexed":true,"name":"newOwner","type":"address"}],"name":"OwnershipTransferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"owner","type":"address"},{"indexed":true,"name":"spender","type":"address"},{"indexed":false,"name":"value","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"from","type":"address"},{"indexed":true,"name":"to","type":"address"},{"indexed":false,"name":"value","type":"uint256"}],"name":"Transfer","type":"event"}]` - -var ENSAbiString = 
`[{"constant":true,"inputs":[{"name":"node","type":"bytes32"}],"name":"resolver","outputs":[{"name":"","type":"address"}],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"node","type":"bytes32"}],"name":"owner","outputs":[{"name":"","type":"address"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"node","type":"bytes32"},{"name":"label","type":"bytes32"},{"name":"owner","type":"address"}],"name":"setSubnodeOwner","outputs":[],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"node","type":"bytes32"},{"name":"ttl","type":"uint64"}],"name":"setTTL","outputs":[],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"node","type":"bytes32"}],"name":"ttl","outputs":[{"name":"","type":"uint64"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"node","type":"bytes32"},{"name":"resolver","type":"address"}],"name":"setResolver","outputs":[],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"node","type":"bytes32"},{"name":"owner","type":"address"}],"name":"setOwner","outputs":[],"payable":false,"type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":false,"name":"owner","type":"address"}],"name":"Transfer","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":true,"name":"label","type":"bytes32"},{"indexed":false,"name":"owner","type":"address"}],"name":"NewOwner","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":false,"name":"resolver","type":"address"}],"name":"NewResolver","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":false,"name":"ttl","type":"uint64"}],"name":"NewTTL","type":"event"}]` - -var MarketPlaceAbiString = 
`[{"constant":false,"inputs":[{"name":"_ownerCutPerMillion","type":"uint256"}],"name":"setOwnerCutPerMillion","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"_legacyNFTAddress","type":"address"}],"name":"setLegacyNFTAddress","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"ERC721_Interface","outputs":[{"name":"","type":"bytes4"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"InterfaceId_ValidateFingerprint","outputs":[{"name":"","type":"bytes4"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[],"name":"unpause","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"acceptedToken","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"assetId","type":"uint256"}],"name":"cancelOrder","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"paused","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"nftAddress","type":"address"},{"name":"assetId","type":"uint256"}],"name":"cancelOrder","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"nftAddress","type":"address"},{"name":"assetId","type":"uint256"},{"name":"priceInWei","type":"uint256"},{"name":"expiresAt","type":"uint256"}],"name":"createOrder","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[],"name":"initialize","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[],"name":"pause","outputs":[],"payable":false,"stateMutability":"nonpayabl
e","type":"function"},{"constant":true,"inputs":[],"name":"owner","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"nftAddress","type":"address"},{"name":"assetId","type":"uint256"},{"name":"price","type":"uint256"},{"name":"fingerprint","type":"bytes"}],"name":"safeExecuteOrder","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"ownerCutPerMillion","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"assetId","type":"uint256"},{"name":"priceInWei","type":"uint256"},{"name":"expiresAt","type":"uint256"}],"name":"createOrder","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"publicationFeeInWei","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"nftAddress","type":"address"},{"name":"assetId","type":"uint256"},{"name":"price","type":"uint256"}],"name":"executeOrder","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"_publicationFee","type":"uint256"}],"name":"setPublicationFee","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[{"name":"contractName","type":"string"},{"name":"migrationId","type":"string"}],"name":"isMigrated","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"_acceptedToken","type":"address"},{"name":"_legacyNFTAddress","type":"address"},{"name":"_owner","type":"address"}],"name":"initialize","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"_sender","type":"address"}],"name":"initialize","outputs":[],
"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"legacyNFTAddress","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"assetId","type":"uint256"}],"name":"auctionByAssetId","outputs":[{"name":"","type":"bytes32"},{"name":"","type":"address"},{"name":"","type":"uint256"},{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"","type":"address"},{"name":"","type":"uint256"}],"name":"orderByAssetId","outputs":[{"name":"id","type":"bytes32"},{"name":"seller","type":"address"},{"name":"nftAddress","type":"address"},{"name":"price","type":"uint256"},{"name":"expiresAt","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"assetId","type":"uint256"},{"name":"price","type":"uint256"}],"name":"executeOrder","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"newOwner","type":"address"}],"name":"transferOwnership","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"anonymous":false,"inputs":[{"indexed":false,"name":"id","type":"bytes32"},{"indexed":true,"name":"assetId","type":"uint256"},{"indexed":true,"name":"seller","type":"address"},{"indexed":false,"name":"nftAddress","type":"address"},{"indexed":false,"name":"priceInWei","type":"uint256"},{"indexed":false,"name":"expiresAt","type":"uint256"}],"name":"OrderCreated","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"name":"id","type":"bytes32"},{"indexed":true,"name":"assetId","type":"uint256"},{"indexed":true,"name":"seller","type":"address"},{"indexed":false,"name":"nftAddress","type":"address"},{"indexed":false,"name":"totalPrice","type":"uint256"},{"indexed":true,"name":"buyer","type":"address"}],"name":"OrderSuccessful","type":"event"},{"anonymous
":false,"inputs":[{"indexed":false,"name":"id","type":"bytes32"},{"indexed":true,"name":"assetId","type":"uint256"},{"indexed":true,"name":"seller","type":"address"},{"indexed":false,"name":"nftAddress","type":"address"}],"name":"OrderCancelled","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"name":"publicationFee","type":"uint256"}],"name":"ChangedPublicationFee","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"name":"ownerCutPerMillion","type":"uint256"}],"name":"ChangedOwnerCutPerMillion","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"legacyNFTAddress","type":"address"}],"name":"ChangeLegacyNFTAddress","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"name":"id","type":"bytes32"},{"indexed":true,"name":"assetId","type":"uint256"},{"indexed":true,"name":"seller","type":"address"},{"indexed":false,"name":"priceInWei","type":"uint256"},{"indexed":false,"name":"expiresAt","type":"uint256"}],"name":"AuctionCreated","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"name":"id","type":"bytes32"},{"indexed":true,"name":"assetId","type":"uint256"},{"indexed":true,"name":"seller","type":"address"},{"indexed":false,"name":"totalPrice","type":"uint256"},{"indexed":true,"name":"winner","type":"address"}],"name":"AuctionSuccessful","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"name":"id","type":"bytes32"},{"indexed":true,"name":"assetId","type":"uint256"},{"indexed":true,"name":"seller","type":"address"}],"name":"AuctionCancelled","type":"event"},{"anonymous":false,"inputs":[],"name":"Pause","type":"event"},{"anonymous":false,"inputs":[],"name":"Unpause","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"previousOwner","type":"address"},{"indexed":true,"name":"newOwner","type":"address"}],"name":"OwnershipTransferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"name":"contractName","type":"string"},{"indexed":false,"name":"migrationId","type":"strin
g"}],"name":"Migrated","type":"event"}]` - -var MolochAbiString = `[{"constant":true,"inputs":[],"name":"processingReward","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"memberAddress","type":"address"},{"name":"proposalIndex","type":"uint256"}],"name":"getMemberProposalVote","outputs":[{"name":"","type":"uint8"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"getCurrentPeriod","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"","type":"address"}],"name":"members","outputs":[{"name":"delegateKey","type":"address"},{"name":"shares","type":"uint256"},{"name":"exists","type":"bool"},{"name":"highestIndexYesVote","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"totalSharesRequested","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"newDelegateKey","type":"address"}],"name":"updateDelegateKey","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"totalShares","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"","type":"uint256"}],"name":"proposalQueue","outputs":[{"name":"proposer","type":"address"},{"name":"applicant","type":"address"},{"name":"sharesRequested","type":"uint256"},{"name":"startingPeriod","type":"uint256"},{"name":"yesVotes","type":"uint256"},{"name":"noVotes","type":"uint256"},{"name":"processed","type":"bool"},{"name":"didPass","type":"bool"},{"name":"aborted","type":"bool"},{"name":"tokenTribute","type":"uint256"},{"name":"details","type":"string"},{"name":"maxTotalSharesAtYesVote","type":"uint256"}],"payable":false,"stateMutability
":"view","type":"function"},{"constant":true,"inputs":[{"name":"","type":"address"}],"name":"memberAddressByDelegateKey","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"gracePeriodLength","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"abortWindow","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"getProposalQueueLength","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"summoningTime","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"votingPeriodLength","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"sharesToBurn","type":"uint256"}],"name":"ragequit","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"proposalDeposit","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"startingPeriod","type":"uint256"}],"name":"hasVotingPeriodExpired","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"applicant","type":"address"},{"name":"tokenTribute","type":"uint256"},{"name":"sharesRequested","type":"uint256"},{"name":"details","type":"string"}],"name":"submitProposal","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"proposalIndex","type":"uint256"},{"name":"uintVote","type":"uint8"}],"name":"submitVote","outputs":[],"payable":false,"stateMutability":"nonpayable
","type":"function"},{"constant":true,"inputs":[{"name":"highestIndexYesVote","type":"uint256"}],"name":"canRagequit","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"guildBank","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"dilutionBound","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"periodDuration","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"approvedToken","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"proposalIndex","type":"uint256"}],"name":"abort","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"proposalIndex","type":"uint256"}],"name":"processProposal","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"inputs":[{"name":"summoner","type":"address"},{"name":"_approvedToken","type":"address"},{"name":"_periodDuration","type":"uint256"},{"name":"_votingPeriodLength","type":"uint256"},{"name":"_gracePeriodLength","type":"uint256"},{"name":"_abortWindow","type":"uint256"},{"name":"_proposalDeposit","type":"uint256"},{"name":"_dilutionBound","type":"uint256"},{"name":"_processingReward","type":"uint256"}],"payable":false,"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":false,"name":"proposalIndex","type":"uint256"},{"indexed":true,"name":"delegateKey","type":"address"},{"indexed":true,"name":"memberAddress","type":"address"},{"indexed":true,"name":"applicant","type":"address"},{"indexed":false,"name":"tokenTribute","type":"uint256"},{"indexed":false,"name":"sharesRequested","type":"u
int256"}],"name":"SubmitProposal","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"proposalIndex","type":"uint256"},{"indexed":true,"name":"delegateKey","type":"address"},{"indexed":true,"name":"memberAddress","type":"address"},{"indexed":false,"name":"uintVote","type":"uint8"}],"name":"SubmitVote","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"proposalIndex","type":"uint256"},{"indexed":true,"name":"applicant","type":"address"},{"indexed":true,"name":"memberAddress","type":"address"},{"indexed":false,"name":"tokenTribute","type":"uint256"},{"indexed":false,"name":"sharesRequested","type":"uint256"},{"indexed":false,"name":"didPass","type":"bool"}],"name":"ProcessProposal","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"memberAddress","type":"address"},{"indexed":false,"name":"sharesToBurn","type":"uint256"}],"name":"Ragequit","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"proposalIndex","type":"uint256"},{"indexed":false,"name":"applicantAddress","type":"address"}],"name":"Abort","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"memberAddress","type":"address"},{"indexed":false,"name":"newDelegateKey","type":"address"}],"name":"UpdateDelegateKey","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"summoner","type":"address"},{"indexed":false,"name":"shares","type":"uint256"}],"name":"SummonComplete","type":"event"}]` - -// ABIs is a look-up table for ABI strings -var ABIs = map[common.Address]string{ - common.HexToAddress("0x314159265dD8dbb310642f98f50C066173C1259b"): ENSAbiString, - common.HexToAddress("0x8dd5fbCe2F6a956C3022bA3663759011Dd51e73E"): TusdAbiString, - common.HexToAddress("0x89d24a6b4ccb1b6faa2625fe562bdd9a23260359"): DaiAbiString, -} - -// Filters -// To add additional filter parameters, filter by other Topics e.g. 
for a Transfer event filter Topics[1] to filter for a specific 'from' address -var TusdGenericFilters = []filters.LogFilter{ - { - Name: BurnEvent.String(), - FromBlock: 5197514, - ToBlock: -1, - Address: TusdContractAddress, - Topics: core.Topics{BurnEvent.Signature()}, - }, - { - Name: MintEvent.String(), - FromBlock: 5197514, - ToBlock: -1, - Address: TusdContractAddress, - Topics: core.Topics{MintEvent.Signature()}, - }, -} diff --git a/pkg/eth/contract_watcher/shared/constants/interface.go b/pkg/eth/contract_watcher/shared/constants/interface.go deleted file mode 100644 index 7063bb99..00000000 --- a/pkg/eth/contract_watcher/shared/constants/interface.go +++ /dev/null @@ -1,129 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package constants - -import ( - "github.com/ethereum/go-ethereum/common/hexutil" -) - -// SupportsInterfaceABI is the basic abi needed to check which interfaces are adhered to -var SupportsInterfaceABI = `[{"constant":true,"inputs":[{"name":"interfaceID","type":"bytes4"}],"name":"supportsInterface","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"}]` - -// Individual event interfaces for constructing ABI from -var AddrChangeInterface = `{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":false,"name":"a","type":"address"}],"name":"AddrChanged","type":"event"}` -var ContentChangeInterface = `{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":false,"name":"hash","type":"bytes32"}],"name":"ContentChanged","type":"event"}` -var NameChangeInterface = `{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":false,"name":"name","type":"string"}],"name":"NameChanged","type":"event"}` -var AbiChangeInterface = `{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":true,"name":"contentType","type":"uint256"}],"name":"ABIChanged","type":"event"}` -var PubkeyChangeInterface = `{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":false,"name":"x","type":"bytes32"},{"indexed":false,"name":"y","type":"bytes32"}],"name":"PubkeyChanged","type":"event"}` -var TextChangeInterface = `{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":false,"name":"indexedKey","type":"string"},{"indexed":false,"name":"key","type":"string"}],"name":"TextChanged","type":"event"}` -var MultihashChangeInterface = `{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":false,"name":"hash","type":"bytes"}],"name":"MultihashChanged","type":"event"}` -var ContenthashChangeInterface = 
`{"anonymous":false,"inputs":[{"indexed":true,"name":"node","type":"bytes32"},{"indexed":false,"name":"hash","type":"bytes"}],"name":"ContenthashChanged","type":"event"}` - -// Resolver interface signatures -type Interface int - -// Interface enums -const ( - MetaSig Interface = iota - AddrChangeSig - ContentChangeSig - NameChangeSig - AbiChangeSig - PubkeyChangeSig - TextChangeSig - MultihashChangeSig - ContentHashChangeSig -) - -// Hex returns the hex signature for an interface -func (e Interface) Hex() string { - strings := [...]string{ - "0x01ffc9a7", - "0x3b3b57de", - "0xd8389dc5", - "0x691f3431", - "0x2203ab56", - "0xc8690233", - "0x59d1d43c", - "0xe89401a1", - "0xbc1c58d1", - } - - if e < MetaSig || e > ContentHashChangeSig { - return "Unknown" - } - - return strings[e] -} - -// Bytes returns the bytes signature for an interface -func (e Interface) Bytes() [4]uint8 { - if e < MetaSig || e > ContentHashChangeSig { - return [4]byte{} - } - - str := e.Hex() - by, _ := hexutil.Decode(str) - var byArray [4]uint8 - for i := 0; i < 4; i++ { - byArray[i] = by[i] - } - - return byArray -} - -// EventSig returns the event signature for an interface -func (e Interface) EventSig() string { - strings := [...]string{ - "", - "AddrChanged(bytes32,address)", - "ContentChanged(bytes32,bytes32)", - "NameChanged(bytes32,string)", - "ABIChanged(bytes32,uint256)", - "PubkeyChanged(bytes32,bytes32,bytes32)", - "TextChanged(bytes32,string,string)", - "MultihashChanged(bytes32,bytes)", - "ContenthashChanged(bytes32,bytes)", - } - - if e < MetaSig || e > ContentHashChangeSig { - return "Unknown" - } - - return strings[e] -} - -// MethodSig returns the method signature for an interface -func (e Interface) MethodSig() string { - strings := [...]string{ - "supportsInterface(bytes4)", - "addr(bytes32)", - "content(bytes32)", - "name(bytes32)", - "ABI(bytes32,uint256)", - "pubkey(bytes32)", - "text(bytes32,string)", - "multihash(bytes32)", - "setContenthash(bytes32,bytes)", - } - - if e 
< MetaSig || e > ContentHashChangeSig { - return "Unknown" - } - - return strings[e] -} diff --git a/pkg/eth/contract_watcher/shared/contract/contract.go b/pkg/eth/contract_watcher/shared/contract/contract.go deleted file mode 100644 index ef9e9815..00000000 --- a/pkg/eth/contract_watcher/shared/contract/contract.go +++ /dev/null @@ -1,172 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package contract - -import ( - "errors" - - "github.com/ethereum/go-ethereum/accounts/abi" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" - - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/filters" -) - -// Contract object to hold our contract data -type Contract struct { - Name string // Name of the contract - Address string // Address of the contract - Network string // Network on which the contract is deployed; default empty "" is Ethereum mainnet - StartingBlock int64 // Starting block of the contract - Abi string // Abi string - ParsedAbi abi.ABI // Parsed abi - Events map[string]types.Event // List of events to watch - Methods []types.Method // List of methods to poll - Filters map[string]filters.LogFilter // Map of event filters to their event names; used only for full sync watcher - FilterArgs map[string]bool // User-input list of values to filter event logs for - MethodArgs map[string]bool // User-input list of values to limit method polling to - EmittedAddrs map[interface{}]bool // List of all unique addresses collected from converted event logs - EmittedHashes map[interface{}]bool // List of all unique hashes collected from converted event logs - CreateAddrList bool // Whether or not to persist address list to postgres - CreateHashList bool // Whether or not to persist hash list to postgres - Piping bool // Whether or not to pipe method results forward as arguments to subsequent methods -} - -// Init initializes a contract object -// If we will be calling methods that use addr, hash, or byte arrays -// as arguments then we initialize maps to hold these types of values -func (c Contract) Init() *Contract { - for _, method := range c.Methods { - for _, arg := range method.Args { - switch arg.Type.T { - case abi.AddressTy: - c.EmittedAddrs = map[interface{}]bool{} - case abi.HashTy, 
abi.BytesTy, abi.FixedBytesTy: - c.EmittedHashes = map[interface{}]bool{} - default: - } - } - } - - return &c -} - -// GenerateFilters uses contract info to generate event filters - full sync contract watcher only -func (c *Contract) GenerateFilters() error { - c.Filters = map[string]filters.LogFilter{} - - for name, event := range c.Events { - c.Filters[name] = filters.LogFilter{ - Name: c.Address + "_" + event.Name, - FromBlock: c.StartingBlock, - ToBlock: -1, - Address: common.HexToAddress(c.Address).Hex(), - Topics: core.Topics{event.Sig().Hex()}, - } - } - // If no filters were generated, throw an error (no point in continuing with this contract) - if len(c.Filters) == 0 { - return errors.New("error: no filters created") - } - - return nil -} - -// WantedEventArg returns true if address is in list of arguments to -// filter events for or if no filtering is specified -func (c *Contract) WantedEventArg(arg string) bool { - if c.FilterArgs == nil { - return false - } else if len(c.FilterArgs) == 0 { - return true - } else if a, ok := c.FilterArgs[arg]; ok { - return a - } - - return false -} - -// WantedMethodArg returns true if address is in list of arguments to -// poll methods with or if no filtering is specified -func (c *Contract) WantedMethodArg(arg interface{}) bool { - if c.MethodArgs == nil { - return false - } else if len(c.MethodArgs) == 0 { - return true - } - - // resolve interface to one of the three types we handle as arguments - str := StringifyArg(arg) - - // See if it's hex string has been filtered for - if a, ok := c.MethodArgs[str]; ok { - return a - } - - return false -} - -// PassesEventFilter returns true if any mapping value matches filtered for address or if no filter exists -// Used to check if an event log name-value mapping should be filtered or not -func (c *Contract) PassesEventFilter(args map[string]string) bool { - for _, arg := range args { - if c.WantedEventArg(arg) { - return true - } - } - - return false -} - -// 
AddEmittedAddr adds event emitted addresses to our list if it passes filter and method polling is on -func (c *Contract) AddEmittedAddr(addresses ...interface{}) { - for _, addr := range addresses { - if c.WantedMethodArg(addr) && c.Methods != nil { - c.EmittedAddrs[addr] = true - } - } -} - -// AddEmittedHash adds event emitted hashes to our list if it passes filter and method polling is on -func (c *Contract) AddEmittedHash(hashes ...interface{}) { - for _, hash := range hashes { - if c.WantedMethodArg(hash) && c.Methods != nil { - c.EmittedHashes[hash] = true - } - } -} - -// StringifyArg resolves a method argument type to string type -func StringifyArg(arg interface{}) (str string) { - switch arg.(type) { - case string: - str = arg.(string) - case common.Address: - a := arg.(common.Address) - str = a.String() - case common.Hash: - a := arg.(common.Hash) - str = a.String() - case []byte: - a := arg.([]byte) - str = hexutil.Encode(a) - } - - return -} diff --git a/pkg/eth/contract_watcher/shared/contract/contract_suite_test.go b/pkg/eth/contract_watcher/shared/contract/contract_suite_test.go deleted file mode 100644 index fa6d1257..00000000 --- a/pkg/eth/contract_watcher/shared/contract/contract_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package contract_test - -import ( - "io/ioutil" - "log" - "testing" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" -) - -func TestContract(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Contract Suite Test") -} - -var _ = BeforeSuite(func() { - log.SetOutput(ioutil.Discard) -}) diff --git a/pkg/eth/contract_watcher/shared/contract/contract_test.go b/pkg/eth/contract_watcher/shared/contract/contract_test.go deleted file mode 100644 index ee3ad7b8..00000000 --- a/pkg/eth/contract_watcher/shared/contract/contract_test.go +++ /dev/null @@ -1,236 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package contract_test - -import ( - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types" -) - -var _ = Describe("Contract", func() { - var err error - var info *contract.Contract - var wantedEvents = []string{"Transfer", "Approval"} - - Describe("GenerateFilters", func() { - - It("Generates filters from contract data", func() { - info = test_helpers.SetupTusdContract(wantedEvents, nil) - err = info.GenerateFilters() - Expect(err).ToNot(HaveOccurred()) - - val, ok := info.Filters["Transfer"] - Expect(ok).To(Equal(true)) - Expect(val).To(Equal(mocks.ExpectedTransferFilter)) - - val, ok = info.Filters["Approval"] - Expect(ok).To(Equal(true)) - Expect(val).To(Equal(mocks.ExpectedApprovalFilter)) - - val, ok = info.Filters["Mint"] - Expect(ok).To(Equal(false)) - - }) - - It("Fails with an empty contract", func() { - info = &contract.Contract{} - err = info.GenerateFilters() - Expect(err).To(HaveOccurred()) - }) - }) - - Describe("IsEventAddr", func() { - - BeforeEach(func() { - info = &contract.Contract{} - info.MethodArgs = map[string]bool{} - info.FilterArgs = map[string]bool{} - }) - - It("Returns true if address is in event address filter list", func() { - info.FilterArgs["testAddress1"] = true - info.FilterArgs["testAddress2"] = true - - is := info.WantedEventArg("testAddress1") - Expect(is).To(Equal(true)) - is = info.WantedEventArg("testAddress2") - Expect(is).To(Equal(true)) - - info.MethodArgs["testAddress3"] = true - is = info.WantedEventArg("testAddress3") - Expect(is).To(Equal(false)) - }) - - It("Returns true if event address filter is empty (no filter)", func() { - is := info.WantedEventArg("testAddress1") - Expect(is).To(Equal(true)) - is = info.WantedEventArg("testAddress2") - 
Expect(is).To(Equal(true)) - }) - - It("Returns false if address is not in event address filter list", func() { - info.FilterArgs["testAddress1"] = true - info.FilterArgs["testAddress2"] = true - - is := info.WantedEventArg("testAddress3") - Expect(is).To(Equal(false)) - }) - - It("Returns false if event address filter is nil (block all)", func() { - info.FilterArgs = nil - - is := info.WantedEventArg("testAddress1") - Expect(is).To(Equal(false)) - is = info.WantedEventArg("testAddress2") - Expect(is).To(Equal(false)) - }) - }) - - Describe("IsMethodAddr", func() { - BeforeEach(func() { - info = &contract.Contract{} - info.MethodArgs = map[string]bool{} - info.FilterArgs = map[string]bool{} - }) - - It("Returns true if address is in method address filter list", func() { - info.MethodArgs["testAddress1"] = true - info.MethodArgs["testAddress2"] = true - - is := info.WantedMethodArg("testAddress1") - Expect(is).To(Equal(true)) - is = info.WantedMethodArg("testAddress2") - Expect(is).To(Equal(true)) - - info.FilterArgs["testAddress3"] = true - is = info.WantedMethodArg("testAddress3") - Expect(is).To(Equal(false)) - }) - - It("Returns true if method address filter list is empty (no filter)", func() { - is := info.WantedMethodArg("testAddress1") - Expect(is).To(Equal(true)) - is = info.WantedMethodArg("testAddress2") - Expect(is).To(Equal(true)) - }) - - It("Returns false if address is not in method address filter list", func() { - info.MethodArgs["testAddress1"] = true - info.MethodArgs["testAddress2"] = true - - is := info.WantedMethodArg("testAddress3") - Expect(is).To(Equal(false)) - }) - - It("Returns false if method address filter list is nil (block all)", func() { - info.MethodArgs = nil - - is := info.WantedMethodArg("testAddress1") - Expect(is).To(Equal(false)) - is = info.WantedMethodArg("testAddress2") - Expect(is).To(Equal(false)) - }) - }) - - Describe("PassesEventFilter", func() { - var mapping map[string]string - BeforeEach(func() { - info = 
&contract.Contract{} - info.FilterArgs = map[string]bool{} - mapping = map[string]string{} - - }) - - It("Return true if event log name-value mapping has filtered for address as a value", func() { - info.FilterArgs["testAddress1"] = true - info.FilterArgs["testAddress2"] = true - - mapping["testInputName1"] = "testAddress1" - mapping["testInputName2"] = "testAddress2" - mapping["testInputName3"] = "testAddress3" - - pass := info.PassesEventFilter(mapping) - Expect(pass).To(Equal(true)) - }) - - It("Return true if event address filter list is empty (no filter)", func() { - mapping["testInputName1"] = "testAddress1" - mapping["testInputName2"] = "testAddress2" - mapping["testInputName3"] = "testAddress3" - - pass := info.PassesEventFilter(mapping) - Expect(pass).To(Equal(true)) - }) - - It("Return false if event log name-value mapping does not have filtered for address as a value", func() { - info.FilterArgs["testAddress1"] = true - info.FilterArgs["testAddress2"] = true - - mapping["testInputName3"] = "testAddress3" - - pass := info.PassesEventFilter(mapping) - Expect(pass).To(Equal(false)) - }) - - It("Return false if event address filter list is nil (block all)", func() { - info.FilterArgs = nil - - mapping["testInputName1"] = "testAddress1" - mapping["testInputName2"] = "testAddress2" - mapping["testInputName3"] = "testAddress3" - - pass := info.PassesEventFilter(mapping) - Expect(pass).To(Equal(false)) - }) - }) - - Describe("AddEmittedAddr", func() { - BeforeEach(func() { - info = &contract.Contract{} - info.FilterArgs = map[string]bool{} - info.MethodArgs = map[string]bool{} - info.Methods = []types.Method{} - info.EmittedAddrs = map[interface{}]bool{} - }) - - It("Adds address to list if it is on the method filter address list", func() { - info.MethodArgs["testAddress2"] = true - info.AddEmittedAddr("testAddress2") - b := info.EmittedAddrs["testAddress2"] - Expect(b).To(Equal(true)) - }) - - It("Adds address to list if method filter is empty", func() { - 
info.AddEmittedAddr("testAddress2") - b := info.EmittedAddrs["testAddress2"] - Expect(b).To(Equal(true)) - }) - - It("Does not add address to list if both filters are closed (nil)", func() { - info.FilterArgs = nil // close both - info.MethodArgs = nil - info.AddEmittedAddr("testAddress1") - b := info.EmittedAddrs["testAddress1"] - Expect(b).To(Equal(false)) - }) - }) -}) diff --git a/pkg/eth/contract_watcher/shared/fetcher/fetcher.go b/pkg/eth/contract_watcher/shared/fetcher/fetcher.go deleted file mode 100644 index e9e4a035..00000000 --- a/pkg/eth/contract_watcher/shared/fetcher/fetcher.go +++ /dev/null @@ -1,124 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package fetcher - -import ( - "fmt" - "log" - "math/big" - - "github.com/ethereum/go-ethereum/common" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -// Fetcher serves as the lower level data fetcher that calls the underlying -// blockchain's FetchConctractData method for a given return type - -// FetcherInterface is the interface definition for a fetcher -type FetcherInterface interface { - FetchBigInt(method, contractAbi, contractAddress string, blockNumber int64, methodArgs []interface{}) (big.Int, error) - FetchBool(method, contractAbi, contractAddress string, blockNumber int64, methodArgs []interface{}) (bool, error) - FetchAddress(method, contractAbi, contractAddress string, blockNumber int64, methodArgs []interface{}) (common.Address, error) - FetchString(method, contractAbi, contractAddress string, blockNumber int64, methodArgs []interface{}) (string, error) - FetchHash(method, contractAbi, contractAddress string, blockNumber int64, methodArgs []interface{}) (common.Hash, error) -} - -// Used to create a new Fetcher error for a given error and fetch method -func newFetcherError(err error, fetchMethod string) *fetcherError { - e := fetcherError{err.Error(), fetchMethod} - log.Println(e.Error()) - return &e -} - -// Fetcher struct -type Fetcher struct { - BlockChain core.BlockChain // Underlying Blockchain -} - -// Fetcher error -type fetcherError struct { - err string - fetchMethod string -} - -// Error method -func (fe *fetcherError) Error() string { - return fmt.Sprintf("Error fetching %s: %s", fe.fetchMethod, fe.err) -} - -// Generic Fetcher methods used by Getters to call contract methods - -// FetchBigInt is the method used to fetch big.Int value from contract -func (f Fetcher) FetchBigInt(method, contractAbi, contractAddress string, blockNumber int64, methodArgs []interface{}) (big.Int, error) { - var result = new(big.Int) - err := f.BlockChain.FetchContractData(contractAbi, contractAddress, method, methodArgs, &result, blockNumber) - - if 
err != nil { - return *result, newFetcherError(err, method) - } - - return *result, nil -} - -// FetchBool is the method used to fetch bool value from contract -func (f Fetcher) FetchBool(method, contractAbi, contractAddress string, blockNumber int64, methodArgs []interface{}) (bool, error) { - var result = new(bool) - err := f.BlockChain.FetchContractData(contractAbi, contractAddress, method, methodArgs, &result, blockNumber) - - if err != nil { - return *result, newFetcherError(err, method) - } - - return *result, nil -} - -// FetchAddress is the method used to fetch address value from contract -func (f Fetcher) FetchAddress(method, contractAbi, contractAddress string, blockNumber int64, methodArgs []interface{}) (common.Address, error) { - var result = new(common.Address) - err := f.BlockChain.FetchContractData(contractAbi, contractAddress, method, methodArgs, &result, blockNumber) - - if err != nil { - return *result, newFetcherError(err, method) - } - - return *result, nil -} - -// FetchString is the method used to fetch string value from contract -func (f Fetcher) FetchString(method, contractAbi, contractAddress string, blockNumber int64, methodArgs []interface{}) (string, error) { - var result = new(string) - err := f.BlockChain.FetchContractData(contractAbi, contractAddress, method, methodArgs, &result, blockNumber) - - if err != nil { - return *result, newFetcherError(err, method) - } - - return *result, nil -} - -// FetchHash is the method used to fetch hash value from contract -func (f Fetcher) FetchHash(method, contractAbi, contractAddress string, blockNumber int64, methodArgs []interface{}) (common.Hash, error) { - var result = new(common.Hash) - err := f.BlockChain.FetchContractData(contractAbi, contractAddress, method, methodArgs, &result, blockNumber) - - if err != nil { - return *result, newFetcherError(err, method) - } - - return *result, nil -} diff --git a/pkg/eth/contract_watcher/shared/getter/interface_getter.go 
b/pkg/eth/contract_watcher/shared/getter/interface_getter.go deleted file mode 100644 index cf1d21d4..00000000 --- a/pkg/eth/contract_watcher/shared/getter/interface_getter.go +++ /dev/null @@ -1,113 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package getter - -import ( - "fmt" - - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/constants" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/fetcher" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -// InterfaceGetter is used to derive the interface of a contract -type InterfaceGetter interface { - GetABI(resolverAddr string, blockNumber int64) (string, error) - GetBlockChain() core.BlockChain -} - -type interfaceGetter struct { - fetcher.Fetcher -} - -// NewInterfaceGetter returns a new InterfaceGetter -func NewInterfaceGetter(blockChain core.BlockChain) InterfaceGetter { - return &interfaceGetter{ - Fetcher: fetcher.Fetcher{ - BlockChain: blockChain, - }, - } -} - -// GetABI is used to construct a custom ABI based on the results from calling supportsInterface -func (g *interfaceGetter) GetABI(resolverAddr string, blockNumber int64) (string, error) { - a := constants.SupportsInterfaceABI - args := make([]interface{}, 1) - args[0] = constants.MetaSig.Bytes() - supports, err := g.getSupportsInterface(a, resolverAddr, blockNumber, 
args) - if err != nil { - return "", fmt.Errorf("call to getSupportsInterface failed: %v", err) - } - if !supports { - return "", fmt.Errorf("contract does not support interface") - } - - abiStr := `[` - args[0] = constants.AddrChangeSig.Bytes() - supports, err = g.getSupportsInterface(a, resolverAddr, blockNumber, args) - if err == nil && supports { - abiStr += constants.AddrChangeInterface + "," - } - args[0] = constants.NameChangeSig.Bytes() - supports, err = g.getSupportsInterface(a, resolverAddr, blockNumber, args) - if err == nil && supports { - abiStr += constants.NameChangeInterface + "," - } - args[0] = constants.ContentChangeSig.Bytes() - supports, err = g.getSupportsInterface(a, resolverAddr, blockNumber, args) - if err == nil && supports { - abiStr += constants.ContentChangeInterface + "," - } - args[0] = constants.AbiChangeSig.Bytes() - supports, err = g.getSupportsInterface(a, resolverAddr, blockNumber, args) - if err == nil && supports { - abiStr += constants.AbiChangeInterface + "," - } - args[0] = constants.PubkeyChangeSig.Bytes() - supports, err = g.getSupportsInterface(a, resolverAddr, blockNumber, args) - if err == nil && supports { - abiStr += constants.PubkeyChangeInterface + "," - } - args[0] = constants.ContentHashChangeSig.Bytes() - supports, err = g.getSupportsInterface(a, resolverAddr, blockNumber, args) - if err == nil && supports { - abiStr += constants.ContenthashChangeInterface + "," - } - args[0] = constants.MultihashChangeSig.Bytes() - supports, err = g.getSupportsInterface(a, resolverAddr, blockNumber, args) - if err == nil && supports { - abiStr += constants.MultihashChangeInterface + "," - } - args[0] = constants.TextChangeSig.Bytes() - supports, err = g.getSupportsInterface(a, resolverAddr, blockNumber, args) - if err == nil && supports { - abiStr += constants.TextChangeInterface + "," - } - abiStr = abiStr[:len(abiStr)-1] + `]` - - return abiStr, nil -} - -// Use this method to check whether or not a contract supports a given 
method/event interface -func (g *interfaceGetter) getSupportsInterface(contractAbi, contractAddress string, blockNumber int64, methodArgs []interface{}) (bool, error) { - return g.Fetcher.FetchBool("supportsInterface", contractAbi, contractAddress, blockNumber, methodArgs) -} - -// GetBlockChain is a method to retrieve the Getter's blockchain -func (g *interfaceGetter) GetBlockChain() core.BlockChain { - return g.Fetcher.BlockChain -} diff --git a/pkg/eth/contract_watcher/shared/helpers/helpers.go b/pkg/eth/contract_watcher/shared/helpers/helpers.go deleted file mode 100644 index 2f440003..00000000 --- a/pkg/eth/contract_watcher/shared/helpers/helpers.go +++ /dev/null @@ -1,72 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package helpers - -import ( - "math/big" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/crypto" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -// ConvertToLog converts a watched event to a log -func ConvertToLog(watchedEvent core.WatchedEvent) types.Log { - allTopics := []string{watchedEvent.Topic0, watchedEvent.Topic1, watchedEvent.Topic2, watchedEvent.Topic3} - var nonNilTopics []string - for _, topic := range allTopics { - if topic != "" { - nonNilTopics = append(nonNilTopics, topic) - } - } - return types.Log{ - Address: common.HexToAddress(watchedEvent.Address), - Topics: createTopics(nonNilTopics...), - Data: hexutil.MustDecode(watchedEvent.Data), - BlockNumber: uint64(watchedEvent.BlockNumber), - TxHash: common.HexToHash(watchedEvent.TxHash), - TxIndex: 0, - BlockHash: common.HexToHash("0x0"), - Index: uint(watchedEvent.Index), - Removed: false, - } -} - -func createTopics(topics ...string) []common.Hash { - var topicsArray []common.Hash - for _, topic := range topics { - topicsArray = append(topicsArray, common.HexToHash(topic)) - } - return topicsArray -} - -// BigFromString creates a big.Int from a string -func BigFromString(n string) *big.Int { - b := new(big.Int) - b.SetString(n, 10) - return b -} - -// GenerateSignature returns the keccak256 hash hex of a string -func GenerateSignature(s string) string { - eventSignature := []byte(s) - hash := crypto.Keccak256Hash(eventSignature) - return hash.Hex() -} diff --git a/pkg/eth/contract_watcher/shared/helpers/test_helpers/database.go b/pkg/eth/contract_watcher/shared/helpers/test_helpers/database.go deleted file mode 100644 index 21dce73e..00000000 --- a/pkg/eth/contract_watcher/shared/helpers/test_helpers/database.go +++ /dev/null @@ -1,243 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or 
modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package test_helpers - -import ( - "github.com/ethereum/go-ethereum/ethclient" - "github.com/ethereum/go-ethereum/rpc" - . "github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/eth" - "github.com/vulcanize/vulcanizedb/pkg/eth/client" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/constants" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks" - rpc2 "github.com/vulcanize/vulcanizedb/pkg/eth/converters/rpc" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/node" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/test_config" -) - -type TransferLog struct { - ID int64 `db:"id"` - VulcanizeLogID int64 `db:"vulcanize_log_id"` - TokenName string `db:"token_name"` - Block int64 `db:"block"` - Tx string `db:"tx"` - From string `db:"from_"` - To string `db:"to_"` - Value string `db:"value_"` -} - -type NewOwnerLog struct { - ID int64 `db:"id"` - VulcanizeLogID int64 `db:"vulcanize_log_id"` - TokenName string `db:"token_name"` - Block int64 `db:"block"` - Tx string `db:"tx"` - Node string `db:"node_"` - Label string `db:"label_"` - Owner string `db:"owner_"` -} - -type HeaderSyncTransferLog struct { - ID int64 `db:"id"` - 
HeaderID int64 `db:"header_id"` - TokenName string `db:"token_name"` - LogIndex int64 `db:"log_idx"` - TxIndex int64 `db:"tx_idx"` - From string `db:"from_"` - To string `db:"to_"` - Value string `db:"value_"` - RawLog []byte `db:"raw_log"` -} - -type HeaderSyncNewOwnerLog struct { - ID int64 `db:"id"` - HeaderID int64 `db:"header_id"` - TokenName string `db:"token_name"` - LogIndex int64 `db:"log_idx"` - TxIndex int64 `db:"tx_idx"` - Node string `db:"node_"` - Label string `db:"label_"` - Owner string `db:"owner_"` - RawLog []byte `db:"raw_log"` -} - -type BalanceOf struct { - ID int64 `db:"id"` - TokenName string `db:"token_name"` - Block int64 `db:"block"` - Address string `db:"who_"` - Balance string `db:"returned"` -} - -type Resolver struct { - ID int64 `db:"id"` - TokenName string `db:"token_name"` - Block int64 `db:"block"` - Node string `db:"node_"` - Address string `db:"returned"` -} - -type Owner struct { - ID int64 `db:"id"` - TokenName string `db:"token_name"` - Block int64 `db:"block"` - Node string `db:"node_"` - Address string `db:"returned"` -} - -func SetupDBandBC() (*postgres.DB, core.BlockChain) { - con := test_config.TestClient - testIPC := con.IPCPath - rawRPCClient, err := rpc.Dial(testIPC) - Expect(err).NotTo(HaveOccurred()) - rpcClient := client.NewRPCClient(rawRPCClient, testIPC) - ethClient := ethclient.NewClient(rawRPCClient) - blockChainClient := client.NewEthClient(ethClient) - madeNode := node.MakeNode(rpcClient) - transactionConverter := rpc2.NewRPCTransactionConverter(ethClient) - blockChain := eth.NewBlockChain(blockChainClient, rpcClient, madeNode, transactionConverter) - - db, err := postgres.NewDB(config.Database{ - Hostname: "localhost", - Name: "vulcanize_testing", - Port: 5432, - }, blockChain.Node()) - Expect(err).NotTo(HaveOccurred()) - - return db, blockChain -} - -func SetupTusdContract(wantedEvents, wantedMethods []string) *contract.Contract { - p := mocks.NewParser(constants.TusdAbiString) - err := 
p.Parse(constants.TusdContractAddress) - Expect(err).ToNot(HaveOccurred()) - - return contract.Contract{ - Name: "TrueUSD", - Address: constants.TusdContractAddress, - Abi: p.Abi(), - ParsedAbi: p.ParsedAbi(), - StartingBlock: 6194634, - Events: p.GetEvents(wantedEvents), - Methods: p.GetSelectMethods(wantedMethods), - MethodArgs: map[string]bool{}, - FilterArgs: map[string]bool{}, - }.Init() -} - -func SetupENSContract(wantedEvents, wantedMethods []string) *contract.Contract { - p := mocks.NewParser(constants.ENSAbiString) - err := p.Parse(constants.EnsContractAddress) - Expect(err).ToNot(HaveOccurred()) - - return contract.Contract{ - Name: "ENS-Registry", - Address: constants.EnsContractAddress, - Abi: p.Abi(), - ParsedAbi: p.ParsedAbi(), - StartingBlock: 6194634, - Events: p.GetEvents(wantedEvents), - Methods: p.GetSelectMethods(wantedMethods), - MethodArgs: map[string]bool{}, - FilterArgs: map[string]bool{}, - }.Init() -} - -func SetupMarketPlaceContract(wantedEvents, wantedMethods []string) *contract.Contract { - p := mocks.NewParser(constants.MarketPlaceAbiString) - err := p.Parse(constants.MarketPlaceContractAddress) - Expect(err).NotTo(HaveOccurred()) - - return contract.Contract{ - Name: "Marketplace", - Address: constants.MarketPlaceContractAddress, - StartingBlock: 6496012, - Abi: p.Abi(), - ParsedAbi: p.ParsedAbi(), - Events: p.GetEvents(wantedEvents), - Methods: p.GetSelectMethods(wantedMethods), - FilterArgs: map[string]bool{}, - MethodArgs: map[string]bool{}, - }.Init() -} - -func SetupMolochContract(wantedEvents, wantedMethods []string) *contract.Contract { - p := mocks.NewParser(constants.MolochAbiString) - err := p.Parse(constants.MolochContractAddress) - Expect(err).NotTo(HaveOccurred()) - - return contract.Contract{ - Name: "Moloch", - Address: constants.MolochContractAddress, - StartingBlock: 7218566, - Abi: p.Abi(), - ParsedAbi: p.ParsedAbi(), - Events: p.GetEvents(wantedEvents), - Methods: p.GetSelectMethods(wantedMethods), - FilterArgs: 
map[string]bool{}, - MethodArgs: map[string]bool{}, - }.Init() -} - -// TODO: tear down/setup DB from migrations so this doesn't alter the schema between tests -func TearDown(db *postgres.DB) { - tx, err := db.Beginx() - Expect(err).NotTo(HaveOccurred()) - - _, err = tx.Exec(`DELETE FROM addresses`) - Expect(err).NotTo(HaveOccurred()) - - _, err = tx.Exec(`DELETE FROM headers`) - Expect(err).NotTo(HaveOccurred()) - - _, err = tx.Exec("DELETE FROM header_sync_transactions") - Expect(err).NotTo(HaveOccurred()) - - _, err = tx.Exec(`DELETE FROM header_sync_receipts`) - Expect(err).NotTo(HaveOccurred()) - - _, err = tx.Exec(`DROP TABLE checked_headers`) - Expect(err).NotTo(HaveOccurred()) - - _, err = tx.Exec(`CREATE TABLE checked_headers ( - id SERIAL PRIMARY KEY, - header_id INTEGER UNIQUE NOT NULL REFERENCES headers (id) ON DELETE CASCADE);`) - Expect(err).NotTo(HaveOccurred()) - - _, err = tx.Exec(`DROP SCHEMA IF EXISTS full_0x8dd5fbce2f6a956c3022ba3663759011dd51e73e CASCADE`) - Expect(err).NotTo(HaveOccurred()) - - _, err = tx.Exec(`DROP SCHEMA IF EXISTS header_0x8dd5fbce2f6a956c3022ba3663759011dd51e73e CASCADE`) - Expect(err).NotTo(HaveOccurred()) - - _, err = tx.Exec(`DROP SCHEMA IF EXISTS full_0x314159265dd8dbb310642f98f50c066173c1259b CASCADE`) - Expect(err).NotTo(HaveOccurred()) - - _, err = tx.Exec(`DROP SCHEMA IF EXISTS header_0x314159265dd8dbb310642f98f50c066173c1259b CASCADE`) - Expect(err).NotTo(HaveOccurred()) - - err = tx.Commit() - Expect(err).NotTo(HaveOccurred()) - - _, err = db.Exec(`VACUUM checked_headers`) - Expect(err).NotTo(HaveOccurred()) -} diff --git a/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks/entities.go b/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks/entities.go deleted file mode 100644 index 92d460a0..00000000 --- a/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks/entities.go +++ /dev/null @@ -1,360 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can 
redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package mocks - -import ( - "encoding/json" - "strings" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/ethereum/go-ethereum/core/types" - - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/constants" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/filters" -) - -var TransferBlock1 = core.Block{ - Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad123ert", - Number: 6194633, - Transactions: []core.TransactionModel{{ - GasLimit: 0, - GasPrice: 0, - Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654aaa", - Nonce: 0, - Receipt: core.Receipt{ - TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654aaa", - ContractAddress: "", - Logs: []core.FullSyncLog{{ - BlockNumber: 6194633, - TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654aaa", - Address: constants.TusdContractAddress, - Topics: core.Topics{ - constants.TransferEvent.Signature(), - "0x000000000000000000000000000000000000000000000000000000000000af21", - "0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391", - "", - }, - Index: 1, - Data: 
"0x000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000089d24a6b4ccb1b6faa2625fe562bdd9a23260359000000000000000000000000000000000000000000000000392d2e2bda9c00000000000000000000000000000000000000000000000000927f41fa0a4a418000000000000000000000000000000000000000000000000000000000005adcfebe", - }}, - }, - TxIndex: 0, - Value: "0", - }}, -} - -var TransferBlock2 = core.Block{ - Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad123ooo", - Number: 6194634, - Transactions: []core.TransactionModel{{ - GasLimit: 0, - GasPrice: 0, - Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654eee", - Nonce: 0, - Receipt: core.Receipt{ - TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654eee", - ContractAddress: "", - Logs: []core.FullSyncLog{{ - BlockNumber: 6194634, - TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654eee", - Address: constants.TusdContractAddress, - Topics: core.Topics{ - constants.TransferEvent.Signature(), - "0x000000000000000000000000000000000000000000000000000000000000af21", - "0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391", - "", - }, - Index: 1, - Data: "0x000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000089d24a6b4ccb1b6faa2625fe562bdd9a23260359000000000000000000000000000000000000000000000000392d2e2bda9c00000000000000000000000000000000000000000000000000927f41fa0a4a418000000000000000000000000000000000000000000000000000000000005adcfebe", - }}, - }, - TxIndex: 0, - Value: "0", - }}, -} - -var NewOwnerBlock1 = core.Block{ - Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad123ppp", - Number: 6194635, - Transactions: []core.TransactionModel{{ - GasLimit: 0, - GasPrice: 0, - Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654bbb", - Nonce: 0, - Receipt: core.Receipt{ - TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654bbb", - 
ContractAddress: "", - Logs: []core.FullSyncLog{{ - BlockNumber: 6194635, - TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654bbb", - Address: constants.EnsContractAddress, - Topics: core.Topics{ - constants.NewOwnerEvent.Signature(), - "0x0000000000000000000000000000000000000000000000000000c02aaa39b223", - "0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391", - "", - }, - Index: 1, - Data: "0x000000000000000000000000000000000000000000000000000000000000af21", - }}, - }, - TxIndex: 0, - Value: "0", - }}, -} - -var NewOwnerBlock2 = core.Block{ - Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad123ggg", - Number: 6194636, - Transactions: []core.TransactionModel{{ - GasLimit: 0, - GasPrice: 0, - Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654lll", - Nonce: 0, - Receipt: core.Receipt{ - TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654lll", - ContractAddress: "", - Logs: []core.FullSyncLog{{ - BlockNumber: 6194636, - TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654lll", - Address: constants.EnsContractAddress, - Topics: core.Topics{ - constants.NewOwnerEvent.Signature(), - "0x0000000000000000000000000000000000000000000000000000c02aaa39b223", - "0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba400", - "", - }, - Index: 1, - Data: "0x000000000000000000000000000000000000000000000000000000000000af21", - }}, - }, - TxIndex: 0, - Value: "0", - }}, -} - -var ExpectedTransferFilter = filters.LogFilter{ - Name: constants.TusdContractAddress + "_" + "Transfer", - Address: constants.TusdContractAddress, - ToBlock: -1, - FromBlock: 6194634, - Topics: core.Topics{constants.TransferEvent.Signature()}, -} - -var ExpectedApprovalFilter = filters.LogFilter{ - Name: constants.TusdContractAddress + "_" + "Approval", - Address: constants.TusdContractAddress, - ToBlock: -1, - FromBlock: 6194634, - Topics: 
core.Topics{constants.ApprovalEvent.Signature()}, -} - -var MockTranferEvent = core.WatchedEvent{ - LogID: 1, - Name: constants.TransferEvent.String(), - BlockNumber: 5488076, - Address: constants.TusdContractAddress, - TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546ae", - Index: 110, - Topic0: constants.TransferEvent.Signature(), - Topic1: "0x000000000000000000000000000000000000000000000000000000000000af21", - Topic2: "0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391", - Topic3: "", - Data: "0x000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000089d24a6b4ccb1b6faa2625fe562bdd9a23260359000000000000000000000000000000000000000000000000392d2e2bda9c00000000000000000000000000000000000000000000000000927f41fa0a4a418000000000000000000000000000000000000000000000000000000000005adcfebe", -} - -var rawFakeHeader, _ = json.Marshal(core.Header{}) - -var MockHeader1 = core.Header{ - Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad123ert", - BlockNumber: 6194632, - Raw: rawFakeHeader, - Timestamp: "50000000", -} - -var MockHeader2 = core.Header{ - Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad456yui", - BlockNumber: 6194633, - Raw: rawFakeHeader, - Timestamp: "50000015", -} - -var MockHeader3 = core.Header{ - Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad234hfs", - BlockNumber: 6194634, - Raw: rawFakeHeader, - Timestamp: "50000030", -} - -var MockHeader4 = core.Header{ - Hash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad234hfs", - BlockNumber: 6194635, - Raw: rawFakeHeader, - Timestamp: "50000030", -} - -var MockTransferLog1 = types.Log{ - Index: 1, - Address: common.HexToAddress(constants.TusdContractAddress), - BlockNumber: 5488076, - TxIndex: 110, - TxHash: common.HexToHash("0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546ae"), - Topics: []common.Hash{ - common.HexToHash(constants.TransferEvent.Signature()), 
- common.HexToHash("0x000000000000000000000000000000000000000000000000000000000000af21"), - common.HexToHash("0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391"), - }, - Data: hexutil.MustDecode("0x000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000089d24a6b4ccb1b6faa2625fe562bdd9a23260359000000000000000000000000000000000000000000000000392d2e2bda9c00000000000000000000000000000000000000000000000000927f41fa0a4a418000000000000000000000000000000000000000000000000000000000005adcfebe"), -} - -var MockTransferLog2 = types.Log{ - Index: 3, - Address: common.HexToAddress(constants.TusdContractAddress), - BlockNumber: 5488077, - TxIndex: 2, - TxHash: common.HexToHash("0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546df"), - Topics: []common.Hash{ - common.HexToHash(constants.TransferEvent.Signature()), - common.HexToHash("0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391"), - common.HexToHash("0x000000000000000000000000000000000000000000000000000000000000af21"), - }, - Data: hexutil.MustDecode("0x000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000089d24a6b4ccb1b6faa2625fe562bdd9a23260359000000000000000000000000000000000000000000000000392d2e2bda9c00000000000000000000000000000000000000000000000000927f41fa0a4a418000000000000000000000000000000000000000000000000000000000005adcfebe"), -} - -var MockMintLog = types.Log{ - Index: 10, - Address: common.HexToAddress(constants.TusdContractAddress), - BlockNumber: 5488080, - TxIndex: 50, - TxHash: common.HexToHash("0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6minty"), - Topics: []common.Hash{ - common.HexToHash(constants.MintEvent.Signature()), - common.HexToHash("0x000000000000000000000000000000000000000000000000000000000000af21"), - }, - Data: 
hexutil.MustDecode("0x000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000089d24a6b4ccb1b6faa2625fe562bdd9a23260359000000000000000000000000000000000000000000000000392d2e2bda9c00000000000000000000000000000000000000000000000000927f41fa0a4a418000000000000000000000000000000000000000000000000000000000005adcfebe"), -} - -var MockNewOwnerLog1 = types.Log{ - Index: 1, - Address: common.HexToAddress(constants.EnsContractAddress), - BlockNumber: 5488076, - TxIndex: 110, - TxHash: common.HexToHash("0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546ae"), - Topics: []common.Hash{ - common.HexToHash(constants.NewOwnerEvent.Signature()), - common.HexToHash("0x000000000000000000000000c02aaa39b223helloa0e5c4f27ead9083c752553"), - common.HexToHash("0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba391"), - }, - Data: hexutil.MustDecode("0x000000000000000000000000000000000000000000000000000000000000af21"), -} - -var MockNewOwnerLog2 = types.Log{ - Index: 3, - Address: common.HexToAddress(constants.EnsContractAddress), - BlockNumber: 5488077, - TxIndex: 2, - TxHash: common.HexToHash("0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546df"), - Topics: []common.Hash{ - common.HexToHash(constants.NewOwnerEvent.Signature()), - common.HexToHash("0x000000000000000000000000c02aaa39b223helloa0e5c4f27ead9083c752553"), - common.HexToHash("0x9dd48110dcc444fdc242510c09bbbbe21a5975cac061d82f7b843bce061ba400"), - }, - Data: hexutil.MustDecode("0x000000000000000000000000000000000000000000000000000000000000af21"), -} - -var MockOrderCreatedLog = types.Log{ - Address: common.HexToAddress(constants.MarketPlaceContractAddress), - Topics: []common.Hash{ - common.HexToHash("0x84c66c3f7ba4b390e20e8e8233e2a516f3ce34a72749e4f12bd010dfba238039"), - common.HexToHash("0xffffffffffffffffffffffffffffff72ffffffffffffffffffffffffffffffd0"), - common.HexToHash("0x00000000000000000000000083b7b6f360a9895d163ea797d9b939b9173b292a"), - }, - Data: 
hexutil.MustDecode("0x633f94affdcabe07c000231f85c752c97b9cc43966b432ec4d18641e6d178233000000000000000000000000f87e31492faf9a91b02ee0deaad50d51d56d5d4d0000000000000000000000000000000000000000000003da9fbcf4446d6000000000000000000000000000000000000000000000000000000000016db2524880"), - BlockNumber: 8587618, - TxHash: common.HexToHash("0x7ad9e2f88416738f3c7ad0a6d260f71794532206a0e838299f5014b4fe81e66e"), - TxIndex: 93, - BlockHash: common.HexToHash("0x06a1762b7f2e070793fc24cd785de0fa485e728832c4f3469790153ae51a56a2"), - Index: 59, - Removed: false, -} - -var MockSubmitVoteLog = types.Log{ - Address: common.HexToAddress(constants.MolochContractAddress), - Topics: []common.Hash{ - common.HexToHash("0x29bf0061f2faa9daa482f061b116195432d435536d8af4ae6b3c5dd78223679b"), - common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000061"), - common.HexToHash("0x0000000000000000000000006ddf1b8e6d71b5b33912607098be123ffe62ae53"), - common.HexToHash("0x00000000000000000000000037385081870ef47e055410fefd582e2a95d2960b"), - }, - Data: hexutil.MustDecode("0x0000000000000000000000000000000000000000000000000000000000000001"), - BlockNumber: 8517621, - TxHash: common.HexToHash("0xcc7390a2099812d0dfc9baef201afbc7a44bfae145050c9dc700b77cbd3cd752"), - TxIndex: 103, - BlockHash: common.HexToHash("0x3e82681d8036b1225fcaa8bcd4cdbe757b39f13468286b303cde22146385525e"), - Index: 132, - Removed: false, -} - -var ens = strings.ToLower(constants.EnsContractAddress) -var tusd = strings.ToLower(constants.TusdContractAddress) - -var MockConfig = config.ContractConfig{ - Network: "", - Addresses: map[string]bool{ - "0x1234567890abcdef": true, - }, - Abis: map[string]string{ - "0x1234567890abcdef": "fake_abi", - }, - Events: map[string][]string{ - "0x1234567890abcdef": {"Transfer"}, - }, - Methods: map[string][]string{ - "0x1234567890abcdef": nil, - }, - MethodArgs: map[string][]string{ - "0x1234567890abcdef": nil, - }, - EventArgs: map[string][]string{ - "0x1234567890abcdef": nil, 
- }, -} - -var MockEmptyConfig = config.ContractConfig{ - Network: "", - Addresses: map[string]bool{ - "0x1234567890abcdef": true, - }, - Abis: map[string]string{ - "0x1234567890abcdef": "fake_abi", - }, - Events: map[string][]string{ - "0x1234567890abcdef": nil, - }, - Methods: map[string][]string{ - "0x1234567890abcdef": nil, - }, - MethodArgs: map[string][]string{ - "0x1234567890abcdef": nil, - }, - EventArgs: map[string][]string{ - "0x1234567890abcdef": nil, - }, -} diff --git a/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks/parser.go b/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks/parser.go deleted file mode 100644 index 415ffbc5..00000000 --- a/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks/parser.go +++ /dev/null @@ -1,165 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "github.com/ethereum/go-ethereum/accounts/abi" - "github.com/vulcanize/vulcanizedb/pkg/eth" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/parser" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types" -) - -// Mock parser -// Is given ABI string instead of address -// Performs all other functions of the real parser -type mockParser struct { - abi string - parsedAbi abi.ABI -} - -func NewParser(abi string) parser.Parser { - return &mockParser{ - abi: abi, - } -} - -func (p *mockParser) Abi() string { - return p.abi -} - -func (p *mockParser) ParsedAbi() abi.ABI { - return p.parsedAbi -} - -func (p *mockParser) ParseAbiStr(abiStr string) error { - panic("implement me") -} - -// Retrieves and parses the abi string -// for the given contract address -func (p *mockParser) Parse(contractAddr string) error { - var err error - p.parsedAbi, err = eth.ParseAbi(p.abi) - - return err -} - -// Returns only specified methods, if they meet the criteria -// Returns as array with methods in same order they were specified -// Nil wanted array => no events are returned -func (p *mockParser) GetSelectMethods(wanted []string) []types.Method { - wLen := len(wanted) - if wLen == 0 { - return nil - } - methods := make([]types.Method, wLen) - for _, m := range p.parsedAbi.Methods { - for i, name := range wanted { - if name == m.Name && okTypes(m, wanted) { - methods[i] = types.NewMethod(m) - } - } - } - - return methods -} - -// Returns wanted methods -// Empty wanted array => all methods are returned -// Nil wanted array => no methods are returned -func (p *mockParser) GetMethods(wanted []string) []types.Method { - if wanted == nil { - return nil - } - methods := make([]types.Method, 0) - length := len(wanted) - for _, m := range p.parsedAbi.Methods { - if length == 0 || stringInSlice(wanted, m.Name) { - methods = append(methods, types.NewMethod(m)) - } - } - - return methods -} - -// Returns wanted events as map of 
types.Events -// If no events are specified, all events are returned -func (p *mockParser) GetEvents(wanted []string) map[string]types.Event { - events := map[string]types.Event{} - - for _, e := range p.parsedAbi.Events { - if len(wanted) == 0 || stringInSlice(wanted, e.Name) { - event := types.NewEvent(e) - events[e.Name] = event - } - } - - return events -} - -func stringInSlice(list []string, s string) bool { - for _, b := range list { - if b == s { - return true - } - } - - return false -} - -func okTypes(m abi.Method, wanted []string) bool { - // Only return method if it has less than 3 arguments, a single output value, and it is a method we want or we want all methods (empty 'wanted' slice) - if len(m.Inputs) < 3 && len(m.Outputs) == 1 && (len(wanted) == 0 || stringInSlice(wanted, m.Name)) { - // Only return methods if inputs are all of accepted types and output is of the accepted types - if !okReturnType(m.Outputs[0]) { - return false - } - for _, input := range m.Inputs { - switch input.Type.T { - case abi.AddressTy, abi.HashTy, abi.BytesTy, abi.FixedBytesTy: - default: - return false - } - } - - return true - } - - return false -} - -func okReturnType(arg abi.Argument) bool { - wantedTypes := []byte{ - abi.UintTy, - abi.IntTy, - abi.BoolTy, - abi.StringTy, - abi.AddressTy, - abi.HashTy, - abi.BytesTy, - abi.FixedBytesTy, - abi.FixedPointTy, - } - - for _, ty := range wantedTypes { - if arg.Type.T == ty { - return true - } - } - - return false -} diff --git a/pkg/eth/contract_watcher/shared/helpers/test_helpers/test_data.go b/pkg/eth/contract_watcher/shared/helpers/test_helpers/test_data.go deleted file mode 100644 index f592f920..00000000 --- a/pkg/eth/contract_watcher/shared/helpers/test_helpers/test_data.go +++ /dev/null @@ -1,109 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free 
Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package test_helpers - -import ( - "strings" - - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/constants" -) - -var ens = strings.ToLower(constants.EnsContractAddress) -var tusd = strings.ToLower(constants.TusdContractAddress) - -var TusdConfig = config.ContractConfig{ - Network: "", - Addresses: map[string]bool{ - tusd: true, - }, - Abis: map[string]string{ - tusd: "", - }, - Events: map[string][]string{ - tusd: {"Transfer"}, - }, - Methods: map[string][]string{ - tusd: nil, - }, - MethodArgs: map[string][]string{ - tusd: nil, - }, - EventArgs: map[string][]string{ - tusd: nil, - }, - StartingBlocks: map[string]int64{ - tusd: 5197514, - }, -} - -var ENSConfig = config.ContractConfig{ - Network: "", - Addresses: map[string]bool{ - ens: true, - }, - Abis: map[string]string{ - ens: "", - }, - Events: map[string][]string{ - ens: {"NewOwner"}, - }, - Methods: map[string][]string{ - ens: nil, - }, - MethodArgs: map[string][]string{ - ens: nil, - }, - EventArgs: map[string][]string{ - ens: nil, - }, - StartingBlocks: map[string]int64{ - ens: 3327417, - }, -} - -var ENSandTusdConfig = config.ContractConfig{ - Network: "", - Addresses: map[string]bool{ - ens: true, - tusd: true, - }, - Abis: map[string]string{ - ens: "", - tusd: "", - }, - Events: map[string][]string{ - ens: {"NewOwner"}, - tusd: {"Transfer"}, - }, - Methods: map[string][]string{ - ens: nil, - tusd: nil, - }, - MethodArgs: map[string][]string{ - ens: nil, - tusd: nil, 
- }, - EventArgs: map[string][]string{ - ens: nil, - tusd: nil, - }, - StartingBlocks: map[string]int64{ - ens: 3327417, - tusd: 5197514, - }, -} diff --git a/pkg/eth/contract_watcher/shared/parser/parser.go b/pkg/eth/contract_watcher/shared/parser/parser.go deleted file mode 100644 index 56e9ba87..00000000 --- a/pkg/eth/contract_watcher/shared/parser/parser.go +++ /dev/null @@ -1,221 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package parser - -import ( - "errors" - - "github.com/ethereum/go-ethereum/accounts/abi" - "github.com/ethereum/go-ethereum/common" - - "github.com/vulcanize/vulcanizedb/pkg/eth" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/constants" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types" -) - -// Parser is used to fetch and parse contract ABIs -// It is dependent on etherscan's api -type Parser interface { - Parse(contractAddr string) error - ParseAbiStr(abiStr string) error - Abi() string - ParsedAbi() abi.ABI - GetMethods(wanted []string) []types.Method - GetSelectMethods(wanted []string) []types.Method - GetEvents(wanted []string) map[string]types.Event -} - -type parser struct { - client *eth.EtherScanAPI - abi string - parsedAbi abi.ABI -} - -// NewParser returns a new Parser -func NewParser(network string) Parser { - url := eth.GenURL(network) - - return &parser{ - client: eth.NewEtherScanClient(url), - } -} - -// Abi returns the parser's configured abi string -func (p *parser) Abi() string { - return p.abi -} - -// ParsedAbi returns the parser's parsed abi -func (p *parser) ParsedAbi() abi.ABI { - return p.parsedAbi -} - -// Parse retrieves and parses the abi string -// for the given contract address -func (p *parser) Parse(contractAddr string) error { - // If the abi is one our locally stored abis, fetch - // TODO: Allow users to pass abis through config - knownAbi, err := p.lookUp(contractAddr) - if err == nil { - p.abi = knownAbi - p.parsedAbi, err = eth.ParseAbi(knownAbi) - return err - } - // Try getting abi from etherscan - abiStr, err := p.client.GetAbi(contractAddr) - if err != nil { - return err - } - //TODO: Implement other ways to fetch abi - p.abi = abiStr - p.parsedAbi, err = eth.ParseAbi(abiStr) - - return err -} - -// ParseAbiStr loads and parses an abi from a given abi string -func (p *parser) ParseAbiStr(abiStr string) error { - var err error - p.abi = abiStr - p.parsedAbi, err = 
eth.ParseAbi(abiStr) - - return err -} - -func (p *parser) lookUp(contractAddr string) (string, error) { - if v, ok := constants.ABIs[common.HexToAddress(contractAddr)]; ok { - return v, nil - } - - return "", errors.New("ABI not present in lookup table") -} - -// GetSelectMethods returns only specified methods, if they meet the criteria -// Returns as array with methods in same order they were specified -// Nil or empty wanted array => no events are returned -func (p *parser) GetSelectMethods(wanted []string) []types.Method { - wLen := len(wanted) - if wLen == 0 { - return nil - } - methods := make([]types.Method, wLen) - for _, m := range p.parsedAbi.Methods { - for i, name := range wanted { - if name == m.Name && okTypes(m, wanted) { - methods[i] = types.NewMethod(m) - } - } - } - - return methods -} - -// GetMethods returns wanted methods -// Empty wanted array => all methods are returned -// Nil wanted array => no methods are returned -func (p *parser) GetMethods(wanted []string) []types.Method { - if wanted == nil { - return nil - } - methods := make([]types.Method, 0) - length := len(wanted) - for _, m := range p.parsedAbi.Methods { - if length == 0 || stringInSlice(wanted, m.Name) { - methods = append(methods, types.NewMethod(m)) - } - } - - return methods -} - -// GetEvents returns wanted events as map of types.Events -// Empty wanted array => all events are returned -// Nil wanted array => no events are returned -func (p *parser) GetEvents(wanted []string) map[string]types.Event { - events := map[string]types.Event{} - if wanted == nil { - return events - } - - length := len(wanted) - for _, e := range p.parsedAbi.Events { - if length == 0 || stringInSlice(wanted, e.Name) { - events[e.Name] = types.NewEvent(e) - } - } - - return events -} - -func okReturnType(arg abi.Argument) bool { - wantedTypes := []byte{ - abi.UintTy, - abi.IntTy, - abi.BoolTy, - abi.StringTy, - abi.AddressTy, - abi.HashTy, - abi.BytesTy, - abi.FixedBytesTy, - abi.FixedPointTy, - } - 
- for _, ty := range wantedTypes { - if arg.Type.T == ty { - return true - } - } - - return false -} - -func okTypes(m abi.Method, wanted []string) bool { - // Only return method if it has less than 3 arguments, a single output value, and it is a method we want or we want all methods (empty 'wanted' slice) - if len(m.Inputs) < 3 && len(m.Outputs) == 1 && (len(wanted) == 0 || stringInSlice(wanted, m.Name)) { - // Only return methods if inputs are all of accepted types and output is of the accepted types - if !okReturnType(m.Outputs[0]) { - return false - } - for _, input := range m.Inputs { - switch input.Type.T { - // Addresses are properly labeled and caught - // But hashes tend to not be explicitly labeled and caught - // Instead bytes32 are assumed to be hashes - case abi.AddressTy, abi.HashTy: - case abi.FixedBytesTy: - if input.Type.Size != 32 { - return false - } - default: - return false - } - } - return true - } - - return false -} - -func stringInSlice(list []string, s string) bool { - for _, b := range list { - if b == s { - return true - } - } - - return false -} diff --git a/pkg/eth/contract_watcher/shared/parser/parser_suite_test.go b/pkg/eth/contract_watcher/shared/parser/parser_suite_test.go deleted file mode 100644 index ce6d4944..00000000 --- a/pkg/eth/contract_watcher/shared/parser/parser_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package parser_test - -import ( - "io/ioutil" - "log" - "testing" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" -) - -func TestParser(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Parser Suite Test") -} - -var _ = BeforeSuite(func() { - log.SetOutput(ioutil.Discard) -}) diff --git a/pkg/eth/contract_watcher/shared/parser/parser_test.go b/pkg/eth/contract_watcher/shared/parser/parser_test.go deleted file mode 100644 index 49645c6b..00000000 --- a/pkg/eth/contract_watcher/shared/parser/parser_test.go +++ /dev/null @@ -1,226 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package parser_test - -import ( - "github.com/ethereum/go-ethereum/accounts/abi" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/constants" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers/mocks" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/parser" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types" -) - -var _ = Describe("Parser", func() { - - var p parser.Parser - var err error - - BeforeEach(func() { - p = parser.NewParser("") - }) - - Describe("Mock Parse", func() { - It("Uses parses given abi string", func() { - mp := mocks.NewParser(constants.DaiAbiString) - err = mp.Parse(constants.DaiContractAddress) - Expect(err).ToNot(HaveOccurred()) - - parsedAbi := mp.ParsedAbi() - expectedAbi, err := eth.ParseAbi(constants.DaiAbiString) - Expect(err).ToNot(HaveOccurred()) - Expect(parsedAbi).To(Equal(expectedAbi)) - - methods := mp.GetSelectMethods([]string{"balanceOf"}) - Expect(len(methods)).To(Equal(1)) - balOf := methods[0] - Expect(balOf.Name).To(Equal("balanceOf")) - Expect(len(balOf.Args)).To(Equal(1)) - Expect(len(balOf.Return)).To(Equal(1)) - - events := mp.GetEvents([]string{"Transfer"}) - _, ok := events["Mint"] - Expect(ok).To(Equal(false)) - e, ok := events["Transfer"] - Expect(ok).To(Equal(true)) - Expect(len(e.Fields)).To(Equal(3)) - }) - }) - - Describe("Parse", func() { - It("Fetches and parses abi from etherscan using contract address", func() { - contractAddr := "0x89d24a6b4ccb1b6faa2625fe562bdd9a23260359" // dai contract address - err = p.Parse(contractAddr) - Expect(err).ToNot(HaveOccurred()) - - expectedAbi := constants.DaiAbiString - Expect(p.Abi()).To(Equal(expectedAbi)) - - expectedParsedAbi, err := eth.ParseAbi(expectedAbi) - Expect(err).ToNot(HaveOccurred()) - Expect(p.ParsedAbi()).To(Equal(expectedParsedAbi)) - }) - - It("Fails with a normal, non-contract, account address", func() { - addr := "0xAb2A8F7cB56D9EC65573BA1bE0f92Fa2Ff7dd165" - err = 
p.Parse(addr) - Expect(err).To(HaveOccurred()) - }) - }) - - Describe("GetEvents", func() { - It("Returns parsed events", func() { - contractAddr := "0x89d24a6b4ccb1b6faa2625fe562bdd9a23260359" - err = p.Parse(contractAddr) - Expect(err).ToNot(HaveOccurred()) - - events := p.GetEvents([]string{"Transfer"}) - - e, ok := events["Transfer"] - Expect(ok).To(Equal(true)) - - abiTy := e.Fields[0].Type.T - Expect(abiTy).To(Equal(abi.AddressTy)) - - pgTy := e.Fields[0].PgType - Expect(pgTy).To(Equal("CHARACTER VARYING(66)")) - - abiTy = e.Fields[1].Type.T - Expect(abiTy).To(Equal(abi.AddressTy)) - - pgTy = e.Fields[1].PgType - Expect(pgTy).To(Equal("CHARACTER VARYING(66)")) - - abiTy = e.Fields[2].Type.T - Expect(abiTy).To(Equal(abi.UintTy)) - - pgTy = e.Fields[2].PgType - Expect(pgTy).To(Equal("NUMERIC")) - - _, ok = events["Approval"] - Expect(ok).To(Equal(false)) - }) - }) - - Describe("GetSelectMethods", func() { - It("Parses and returns only methods specified in passed array", func() { - contractAddr := "0x89d24a6b4ccb1b6faa2625fe562bdd9a23260359" - err = p.Parse(contractAddr) - Expect(err).ToNot(HaveOccurred()) - - methods := p.GetSelectMethods([]string{"balanceOf"}) - Expect(len(methods)).To(Equal(1)) - - balOf := methods[0] - Expect(balOf.Name).To(Equal("balanceOf")) - Expect(len(balOf.Args)).To(Equal(1)) - Expect(len(balOf.Return)).To(Equal(1)) - - abiTy := balOf.Args[0].Type.T - Expect(abiTy).To(Equal(abi.AddressTy)) - - pgTy := balOf.Args[0].PgType - Expect(pgTy).To(Equal("CHARACTER VARYING(66)")) - - abiTy = balOf.Return[0].Type.T - Expect(abiTy).To(Equal(abi.UintTy)) - - pgTy = balOf.Return[0].PgType - Expect(pgTy).To(Equal("NUMERIC")) - - }) - - It("Parses and returns methods in the order they were specified", func() { - contractAddr := "0x89d24a6b4ccb1b6faa2625fe562bdd9a23260359" - err = p.Parse(contractAddr) - Expect(err).ToNot(HaveOccurred()) - - selectMethods := p.GetSelectMethods([]string{"balanceOf", "allowance"}) - 
Expect(len(selectMethods)).To(Equal(2)) - - balOf := selectMethods[0] - allow := selectMethods[1] - - Expect(balOf.Name).To(Equal("balanceOf")) - Expect(allow.Name).To(Equal("allowance")) - }) - - It("Returns nil if given a nil or empty array", func() { - contractAddr := "0x89d24a6b4ccb1b6faa2625fe562bdd9a23260359" - err = p.Parse(contractAddr) - Expect(err).ToNot(HaveOccurred()) - - var nilArr []types.Method - selectMethods := p.GetSelectMethods([]string{}) - Expect(selectMethods).To(Equal(nilArr)) - selectMethods = p.GetMethods(nil) - Expect(selectMethods).To(Equal(nilArr)) - }) - - }) - - Describe("GetMethods", func() { - It("Parses and returns only methods specified in passed array", func() { - contractAddr := "0x89d24a6b4ccb1b6faa2625fe562bdd9a23260359" - err = p.Parse(contractAddr) - Expect(err).ToNot(HaveOccurred()) - - methods := p.GetMethods([]string{"balanceOf"}) - Expect(len(methods)).To(Equal(1)) - - balOf := methods[0] - Expect(balOf.Name).To(Equal("balanceOf")) - Expect(len(balOf.Args)).To(Equal(1)) - Expect(len(balOf.Return)).To(Equal(1)) - - abiTy := balOf.Args[0].Type.T - Expect(abiTy).To(Equal(abi.AddressTy)) - - pgTy := balOf.Args[0].PgType - Expect(pgTy).To(Equal("CHARACTER VARYING(66)")) - - abiTy = balOf.Return[0].Type.T - Expect(abiTy).To(Equal(abi.UintTy)) - - pgTy = balOf.Return[0].PgType - Expect(pgTy).To(Equal("NUMERIC")) - - }) - - It("Returns nil if given a nil array", func() { - contractAddr := "0x89d24a6b4ccb1b6faa2625fe562bdd9a23260359" - err = p.Parse(contractAddr) - Expect(err).ToNot(HaveOccurred()) - - var nilArr []types.Method - selectMethods := p.GetMethods(nil) - Expect(selectMethods).To(Equal(nilArr)) - }) - - It("Returns every method if given an empty array", func() { - contractAddr := "0x89d24a6b4ccb1b6faa2625fe562bdd9a23260359" - err = p.Parse(contractAddr) - Expect(err).ToNot(HaveOccurred()) - - selectMethods := p.GetMethods([]string{}) - Expect(len(selectMethods)).To(Equal(25)) - }) - }) -}) diff --git 
a/pkg/eth/contract_watcher/shared/poller/poller.go b/pkg/eth/contract_watcher/shared/poller/poller.go deleted file mode 100644 index e95157c9..00000000 --- a/pkg/eth/contract_watcher/shared/poller/poller.go +++ /dev/null @@ -1,291 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package poller - -import ( - "errors" - "fmt" - "math/big" - "strconv" - - "github.com/ethereum/go-ethereum/accounts/abi" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" - - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/repository" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -// Poller is the interface for polling public contract methods -type Poller interface { - PollContract(con contract.Contract, lastBlock int64) error - PollContractAt(con contract.Contract, blockNumber int64) error - FetchContractData(contractAbi, contractAddress, method string, methodArgs []interface{}, result interface{}, blockNumber int64) error -} - -type poller struct { - repository.MethodRepository - bc core.BlockChain - contract contract.Contract -} - -// NewPoller returns a new Poller -func 
NewPoller(blockChain core.BlockChain, db *postgres.DB, mode types.Mode) Poller { - return &poller{ - MethodRepository: repository.NewMethodRepository(db, mode), - bc: blockChain, - } -} - -// PollContract polls a contract's public methods from the contracts starting block to specified last block -func (p *poller) PollContract(con contract.Contract, lastBlock int64) error { - for i := con.StartingBlock; i <= lastBlock; i++ { - if err := p.PollContractAt(con, i); err != nil { - return err - } - } - - return nil -} - -// PollContractAt polls a contract's public getter methods at the specified block height -func (p *poller) PollContractAt(con contract.Contract, blockNumber int64) error { - p.contract = con - for _, m := range con.Methods { - switch len(m.Args) { - case 0: - if err := p.pollNoArgAt(m, blockNumber); err != nil { - return err - } - case 1: - if err := p.pollSingleArgAt(m, blockNumber); err != nil { - return err - } - case 2: - if err := p.pollDoubleArgAt(m, blockNumber); err != nil { - return err - } - default: - return errors.New("poller error: too many arguments to handle") - - } - } - - return nil -} - -func (p *poller) pollNoArgAt(m types.Method, bn int64) error { - result := types.Result{ - Block: bn, - Method: m, - Inputs: nil, - PgType: m.Return[0].PgType, - } - - var out interface{} - err := p.bc.FetchContractData(p.contract.Abi, p.contract.Address, m.Name, nil, &out, bn) - if err != nil { - return fmt.Errorf("poller error calling 0 argument method\r\nblock: %d, method: %s, contract: %s\r\nerr: %v", bn, m.Name, p.contract.Address, err) - } - strOut, err := stringify(out) - if err != nil { - return err - } - - // Cache returned value if piping is turned on - p.cache(out) - result.Output = strOut - - // Persist result immediately - err = p.PersistResults([]types.Result{result}, m, p.contract.Address, p.contract.Name) - if err != nil { - return fmt.Errorf("poller error persisting 0 argument method result\r\nblock: %d, method: %s, contract: %s\r\nerr: 
%v", bn, m.Name, p.contract.Address, err) - } - - return nil -} - -// Use token holder address to poll methods that take 1 address argument (e.g. balanceOf) -func (p *poller) pollSingleArgAt(m types.Method, bn int64) error { - result := types.Result{ - Block: bn, - Method: m, - Inputs: make([]interface{}, 1), - PgType: m.Return[0].PgType, - } - - // Depending on the type of the arg choose - // the correct argument set to iterate over - var args map[interface{}]bool - switch m.Args[0].Type.T { - case abi.HashTy, abi.FixedBytesTy: - args = p.contract.EmittedHashes - case abi.AddressTy: - args = p.contract.EmittedAddrs - } - if len(args) == 0 { // If we haven't collected any args by now we can't call the method - return nil - } - - results := make([]types.Result, 0, len(args)) - for arg := range args { - in := []interface{}{arg} - strIn := []interface{}{contract.StringifyArg(arg)} - - var out interface{} - err := p.bc.FetchContractData(p.contract.Abi, p.contract.Address, m.Name, in, &out, bn) - if err != nil { - return fmt.Errorf("poller error calling 1 argument method\r\nblock: %d, method: %s, contract: %s\r\nerr: %v", bn, m.Name, p.contract.Address, err) - } - strOut, err := stringify(out) - if err != nil { - return err - } - p.cache(out) - - // Write inputs and outputs to result and append result to growing set - result.Inputs = strIn - result.Output = strOut - results = append(results, result) - } - // Persist result set as batch - err := p.PersistResults(results, m, p.contract.Address, p.contract.Name) - if err != nil { - return fmt.Errorf("poller error persisting 1 argument method result\r\nblock: %d, method: %s, contract: %s\r\nerr: %v", bn, m.Name, p.contract.Address, err) - } - - return nil -} - -// Use token holder address to poll methods that take 2 address arguments (e.g. 
allowance) -func (p *poller) pollDoubleArgAt(m types.Method, bn int64) error { - result := types.Result{ - Block: bn, - Method: m, - Inputs: make([]interface{}, 2), - PgType: m.Return[0].PgType, - } - - // Depending on the type of the args choose - // the correct argument sets to iterate over - var firstArgs map[interface{}]bool - switch m.Args[0].Type.T { - case abi.HashTy, abi.FixedBytesTy: - firstArgs = p.contract.EmittedHashes - case abi.AddressTy: - firstArgs = p.contract.EmittedAddrs - } - if len(firstArgs) == 0 { - return nil - } - - var secondArgs map[interface{}]bool - switch m.Args[1].Type.T { - case abi.HashTy, abi.FixedBytesTy: - secondArgs = p.contract.EmittedHashes - case abi.AddressTy: - secondArgs = p.contract.EmittedAddrs - } - if len(secondArgs) == 0 { - return nil - } - - results := make([]types.Result, 0, len(firstArgs)*len(secondArgs)) - for arg1 := range firstArgs { - for arg2 := range secondArgs { - in := []interface{}{arg1, arg2} - strIn := []interface{}{contract.StringifyArg(arg1), contract.StringifyArg(arg2)} - - var out interface{} - err := p.bc.FetchContractData(p.contract.Abi, p.contract.Address, m.Name, in, &out, bn) - if err != nil { - return fmt.Errorf("poller error calling 2 argument method\r\nblock: %d, method: %s, contract: %s\r\nerr: %v", bn, m.Name, p.contract.Address, err) - } - strOut, err := stringify(out) - if err != nil { - return err - } - p.cache(out) - - result.Output = strOut - result.Inputs = strIn - results = append(results, result) - } - } - - err := p.PersistResults(results, m, p.contract.Address, p.contract.Name) - if err != nil { - return fmt.Errorf("poller error persisting 2 argument method result\r\nblock: %d, method: %s, contract: %s\r\nerr: %v", bn, m.Name, p.contract.Address, err) - } - - return nil -} - -// FetchContractData is just a wrapper around the poller blockchain's FetchContractData method -func (p *poller) FetchContractData(contractAbi, contractAddress, method string, methodArgs []interface{}, 
result interface{}, blockNumber int64) error { - return p.bc.FetchContractData(contractAbi, contractAddress, method, methodArgs, result, blockNumber) -} - -// This is used to cache a method return value if method piping is turned on -func (p *poller) cache(out interface{}) { - if p.contract.Piping { - switch out.(type) { - case common.Hash: - if p.contract.EmittedHashes != nil { - p.contract.AddEmittedHash(out.(common.Hash)) - } - case []byte: - if p.contract.EmittedHashes != nil && len(out.([]byte)) == 32 { - p.contract.AddEmittedHash(common.BytesToHash(out.([]byte))) - } - case common.Address: - if p.contract.EmittedAddrs != nil { - p.contract.AddEmittedAddr(out.(common.Address)) - } - default: - } - } -} - -func stringify(input interface{}) (string, error) { - switch input.(type) { - case *big.Int: - b := input.(*big.Int) - return b.String(), nil - case common.Address: - a := input.(common.Address) - return a.String(), nil - case common.Hash: - h := input.(common.Hash) - return h.String(), nil - case string: - return input.(string), nil - case []byte: - b := hexutil.Encode(input.([]byte)) - return b, nil - case byte: - b := input.(byte) - return string(b), nil - case bool: - return strconv.FormatBool(input.(bool)), nil - default: - return "", errors.New("error: unhandled return type") - } -} diff --git a/pkg/eth/contract_watcher/shared/repository/event_repository.go b/pkg/eth/contract_watcher/shared/repository/event_repository.go deleted file mode 100644 index 3653ca93..00000000 --- a/pkg/eth/contract_watcher/shared/repository/event_repository.go +++ /dev/null @@ -1,316 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package repository - -import ( - "errors" - "fmt" - "strings" - - "github.com/hashicorp/golang-lru" - "github.com/sirupsen/logrus" - - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -const ( - // Number of contract address and method ids to keep in cache - contractCacheSize = 100 - eventCacheSize = 1000 -) - -// EventRepository is used to persist event data into custom tables -type EventRepository interface { - PersistLogs(logs []types.Log, eventInfo types.Event, contractAddr, contractName string) error - CreateEventTable(contractAddr string, event types.Event) (bool, error) - CreateContractSchema(contractName string) (bool, error) - CheckSchemaCache(key string) (interface{}, bool) - CheckTableCache(key string) (interface{}, bool) -} - -type eventRepository struct { - db *postgres.DB - mode types.Mode - schemas *lru.Cache // Cache names of recently used schemas to minimize db connections - tables *lru.Cache // Cache names of recently used tables to minimize db connections -} - -// NewEventRepository returns a new EventRepository -func NewEventRepository(db *postgres.DB, mode types.Mode) EventRepository { - ccs, _ := lru.New(contractCacheSize) - ecs, _ := lru.New(eventCacheSize) - return &eventRepository{ - db: db, - mode: mode, - schemas: ccs, - tables: ecs, - } -} - -// PersistLogs creates a schema for the contract if needed -// Creates table for the watched contract event if needed -// Persists converted event log data into this custom table -func (r *eventRepository) PersistLogs(logs []types.Log, eventInfo 
types.Event, contractAddr, contractName string) error { - if len(logs) == 0 { - return errors.New("event repository error: passed empty logs slice") - } - _, schemaErr := r.CreateContractSchema(contractAddr) - if schemaErr != nil { - return fmt.Errorf("error creating schema for contract %s: %s", contractAddr, schemaErr.Error()) - } - - _, tableErr := r.CreateEventTable(contractAddr, eventInfo) - if tableErr != nil { - return fmt.Errorf("error creating table for event %s on contract %s: %s", eventInfo.Name, contractAddr, tableErr.Error()) - } - - return r.persistLogs(logs, eventInfo, contractAddr, contractName) -} - -func (r *eventRepository) persistLogs(logs []types.Log, eventInfo types.Event, contractAddr, contractName string) error { - var err error - switch r.mode { - case types.HeaderSync: - err = r.persistHeaderSyncLogs(logs, eventInfo, contractAddr, contractName) - case types.FullSync: - err = r.persistFullSyncLogs(logs, eventInfo, contractAddr, contractName) - default: - return errors.New("event repository error: unhandled mode") - } - - return err -} - -// Creates a custom postgres command to persist logs for the given event (compatible with header synced vDB) -func (r *eventRepository) persistHeaderSyncLogs(logs []types.Log, eventInfo types.Event, contractAddr, contractName string) error { - tx, txErr := r.db.Beginx() - if txErr != nil { - return fmt.Errorf("error beginning db transaction: %s", txErr.Error()) - } - - for _, event := range logs { - // Begin pg query string - pgStr := fmt.Sprintf("INSERT INTO %s_%s.%s_event ", r.mode.String(), strings.ToLower(contractAddr), strings.ToLower(eventInfo.Name)) - pgStr = pgStr + "(header_id, token_name, raw_log, log_idx, tx_idx" - el := len(event.Values) - - // Preallocate slice of needed capacity and proceed to pack variables into it in same order they appear in string - data := make([]interface{}, 0, 5+el) - data = append(data, - event.ID, - contractName, - event.Raw, - event.LogIndex, - event.TransactionIndex) 
- - // Iterate over inputs and append name to query string and value to input data - for inputName, input := range event.Values { - pgStr = pgStr + fmt.Sprintf(", %s_", strings.ToLower(inputName)) // Add underscore after to avoid any collisions with reserved pg words - data = append(data, input) - } - - // For each input entry we created we add its postgres command variable to the string - pgStr = pgStr + ") VALUES ($1, $2, $3, $4, $5" - for i := 0; i < el; i++ { - pgStr = pgStr + fmt.Sprintf(", $%d", i+6) - } - pgStr = pgStr + ") ON CONFLICT DO NOTHING" - - logrus.Tracef("query for inserting log: %s", pgStr) - // Add this query to the transaction - _, execErr := tx.Exec(pgStr, data...) - if execErr != nil { - rollbackErr := tx.Rollback() - if rollbackErr != nil { - logrus.Warnf("error rolling back transactions while persisting logs: %s", rollbackErr.Error()) - } - return fmt.Errorf("error executing query: %s", execErr.Error()) - } - } - - return tx.Commit() -} - -// Creates a custom postgres command to persist logs for the given event (compatible with fully synced vDB) -func (r *eventRepository) persistFullSyncLogs(logs []types.Log, eventInfo types.Event, contractAddr, contractName string) error { - tx, txErr := r.db.Beginx() - if txErr != nil { - return fmt.Errorf("error beginning db transaction: %s", txErr.Error()) - } - - for _, event := range logs { - pgStr := fmt.Sprintf("INSERT INTO %s_%s.%s_event ", r.mode.String(), strings.ToLower(contractAddr), strings.ToLower(eventInfo.Name)) - pgStr = pgStr + "(vulcanize_log_id, token_name, block, tx" - el := len(event.Values) - - data := make([]interface{}, 0, 4+el) - data = append(data, - event.ID, - contractName, - event.Block, - event.Tx) - - for inputName, input := range event.Values { - pgStr = pgStr + fmt.Sprintf(", %s_", strings.ToLower(inputName)) - data = append(data, input) - } - - pgStr = pgStr + ") VALUES ($1, $2, $3, $4" - for i := 0; i < el; i++ { - pgStr = pgStr + fmt.Sprintf(", $%d", i+5) - } - pgStr = 
pgStr + ") ON CONFLICT (vulcanize_log_id) DO NOTHING" - - logrus.Tracef("query for inserting log: %s", pgStr) - _, execErr := tx.Exec(pgStr, data...) - if execErr != nil { - rollbackErr := tx.Rollback() - if rollbackErr != nil { - logrus.Warnf("error rolling back transactions while persisting logs: %s", rollbackErr.Error()) - } - return fmt.Errorf("error executing query: %s", execErr.Error()) - } - } - - return tx.Commit() -} - -// CreateEventTable checks for event table and creates it if it does not already exist -// Returns true if it created a new table; returns false if table already existed -func (r *eventRepository) CreateEventTable(contractAddr string, event types.Event) (bool, error) { - tableID := fmt.Sprintf("%s_%s.%s_event", r.mode.String(), strings.ToLower(contractAddr), strings.ToLower(event.Name)) - // Check cache before querying pq to see if table exists - _, ok := r.tables.Get(tableID) - if ok { - return false, nil - } - tableExists, checkTableErr := r.checkForTable(contractAddr, event.Name) - if checkTableErr != nil { - return false, fmt.Errorf("error checking for table: %s", checkTableErr) - } - - if !tableExists { - createTableErr := r.newEventTable(tableID, event) - if createTableErr != nil { - return false, fmt.Errorf("error creating table: %s", createTableErr.Error()) - } - } - - // Add table id to cache - r.tables.Add(tableID, true) - - return !tableExists, nil -} - -// Creates a table for the given contract and event -func (r *eventRepository) newEventTable(tableID string, event types.Event) error { - // Begin pg string - var pgStr = fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s ", tableID) - var err error - - // Handle different modes - switch r.mode { - case types.FullSync: - pgStr = pgStr + "(id SERIAL, vulcanize_log_id INTEGER NOT NULL UNIQUE, token_name CHARACTER VARYING(66) NOT NULL, block INTEGER NOT NULL, tx CHARACTER VARYING(66) NOT NULL," - - // Iterate over event fields, using their name and pgType to grow the string - for _, field 
:= range event.Fields { - pgStr = pgStr + fmt.Sprintf(" %s_ %s NOT NULL,", strings.ToLower(field.Name), field.PgType) - } - pgStr = pgStr + " CONSTRAINT log_index_fk FOREIGN KEY (vulcanize_log_id) REFERENCES full_sync_logs (id) ON DELETE CASCADE)" - case types.HeaderSync: - pgStr = pgStr + "(id SERIAL, header_id INTEGER NOT NULL REFERENCES headers (id) ON DELETE CASCADE, token_name CHARACTER VARYING(66) NOT NULL, raw_log JSONB, log_idx INTEGER NOT NULL, tx_idx INTEGER NOT NULL," - - for _, field := range event.Fields { - pgStr = pgStr + fmt.Sprintf(" %s_ %s NOT NULL,", strings.ToLower(field.Name), field.PgType) - } - pgStr = pgStr + " UNIQUE (header_id, tx_idx, log_idx))" - default: - return errors.New("unhandled repository mode") - } - - _, err = r.db.Exec(pgStr) - - return err -} - -// Checks if a table already exists for the given contract and event -func (r *eventRepository) checkForTable(contractAddr string, eventName string) (bool, error) { - pgStr := fmt.Sprintf("SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_schema = '%s_%s' AND table_name = '%s_event')", r.mode.String(), strings.ToLower(contractAddr), strings.ToLower(eventName)) - - var exists bool - err := r.db.Get(&exists, pgStr) - - return exists, err -} - -// CreateContractSchema checks for contract schema and creates it if it does not already exist -// Returns true if it created a new schema; returns false if schema already existed -func (r *eventRepository) CreateContractSchema(contractAddr string) (bool, error) { - if contractAddr == "" { - return false, errors.New("error: no contract address specified") - } - - // Check cache before querying pq to see if schema exists - _, ok := r.schemas.Get(contractAddr) - if ok { - return false, nil - } - schemaExists, checkSchemaErr := r.checkForSchema(contractAddr) - if checkSchemaErr != nil { - return false, fmt.Errorf("error checking for schema: %s", checkSchemaErr.Error()) - } - if !schemaExists { - createSchemaErr := 
r.newContractSchema(contractAddr) - if createSchemaErr != nil { - return false, fmt.Errorf("error creating schema: %s", createSchemaErr.Error()) - } - } - - // Add schema name to cache - r.schemas.Add(contractAddr, true) - - return !schemaExists, nil -} - -// Creates a schema for the given contract -func (r *eventRepository) newContractSchema(contractAddr string) error { - _, err := r.db.Exec("CREATE SCHEMA IF NOT EXISTS " + r.mode.String() + "_" + strings.ToLower(contractAddr)) - - return err -} - -// Checks if a schema already exists for the given contract -func (r *eventRepository) checkForSchema(contractAddr string) (bool, error) { - pgStr := fmt.Sprintf("SELECT EXISTS (SELECT schema_name FROM information_schema.schemata WHERE schema_name = '%s_%s')", r.mode.String(), strings.ToLower(contractAddr)) - - var exists bool - err := r.db.QueryRow(pgStr).Scan(&exists) - - return exists, err -} - -// CheckSchemaCache is used to query the schema name cache -func (r *eventRepository) CheckSchemaCache(key string) (interface{}, bool) { - return r.schemas.Get(key) -} - -// CheckTableCache is used to query the table name cache -func (r *eventRepository) CheckTableCache(key string) (interface{}, bool) { - return r.tables.Get(key) -} diff --git a/pkg/eth/contract_watcher/shared/repository/method_repository.go b/pkg/eth/contract_watcher/shared/repository/method_repository.go deleted file mode 100644 index 711b36e2..00000000 --- a/pkg/eth/contract_watcher/shared/repository/method_repository.go +++ /dev/null @@ -1,235 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package repository - -import ( - "errors" - "fmt" - "strings" - - "github.com/hashicorp/golang-lru" - "github.com/sirupsen/logrus" - - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -const methodCacheSize = 1000 - -// MethodRepository is used to persist public getter method data -type MethodRepository interface { - PersistResults(results []types.Result, methodInfo types.Method, contractAddr, contractName string) error - CreateMethodTable(contractAddr string, method types.Method) (bool, error) - CreateContractSchema(contractAddr string) (bool, error) - CheckSchemaCache(key string) (interface{}, bool) - CheckTableCache(key string) (interface{}, bool) -} - -type methodRepository struct { - *postgres.DB - mode types.Mode - schemas *lru.Cache // Cache names of recently used schemas to minimize db connections - tables *lru.Cache // Cache names of recently used tables to minimize db connections -} - -// NewMethodRepository returns a new MethodRepository -func NewMethodRepository(db *postgres.DB, mode types.Mode) MethodRepository { - ccs, _ := lru.New(contractCacheSize) - mcs, _ := lru.New(methodCacheSize) - return &methodRepository{ - DB: db, - mode: mode, - schemas: ccs, - tables: mcs, - } -} - -// PersistResults creates a schema for the contract if needed -// Creates table for the contract method if needed -// Persists method polling data into this custom table -func (r *methodRepository) PersistResults(results []types.Result, methodInfo types.Method, contractAddr, contractName string) error { - if len(results) == 
0 { - return errors.New("method repository error: passed empty results slice") - } - _, err := r.CreateContractSchema(contractAddr) - if err != nil { - return err - } - - _, err = r.CreateMethodTable(contractAddr, methodInfo) - if err != nil { - return err - } - - return r.persistResults(results, methodInfo, contractAddr, contractName) -} - -// Creates a custom postgres command to persist logs for the given event -func (r *methodRepository) persistResults(results []types.Result, methodInfo types.Method, contractAddr, contractName string) error { - tx, err := r.DB.Beginx() - if err != nil { - return err - } - - for _, result := range results { - // Begin postgres string - pgStr := fmt.Sprintf("INSERT INTO %s_%s.%s_method ", r.mode.String(), strings.ToLower(contractAddr), strings.ToLower(result.Name)) - pgStr = pgStr + "(token_name, block" - ml := len(result.Args) - - // Preallocate slice of needed capacity and proceed to pack variables into it in same order they appear in string - data := make([]interface{}, 0, 3+ml) - data = append(data, - contractName, - result.Block) - - // Iterate over method args and return value, adding names - // to the string and pushing values to the slice - for i, arg := range result.Args { - pgStr = pgStr + fmt.Sprintf(", %s_", strings.ToLower(arg.Name)) // Add underscore after to avoid any collisions with reserved pg words - data = append(data, result.Inputs[i]) - } - pgStr = pgStr + ", returned) VALUES ($1, $2" - data = append(data, result.Output) - - // For each input entry we created we add its postgres command variable to the string - for i := 0; i <= ml; i++ { - pgStr = pgStr + fmt.Sprintf(", $%d", i+3) - } - pgStr = pgStr + ")" - - // Add this query to the transaction - _, err = tx.Exec(pgStr, data...) 
- if err != nil { - rollbackErr := tx.Rollback() - if rollbackErr != nil { - logrus.Warnf("error rolling back transaction: %s", rollbackErr.Error()) - } - return err - } - } - - return tx.Commit() -} - -// CreateMethodTable checks for event table and creates it if it does not already exist -func (r *methodRepository) CreateMethodTable(contractAddr string, method types.Method) (bool, error) { - tableID := fmt.Sprintf("%s_%s.%s_method", r.mode.String(), strings.ToLower(contractAddr), strings.ToLower(method.Name)) - - // Check cache before querying pq to see if table exists - _, ok := r.tables.Get(tableID) - if ok { - return false, nil - } - tableExists, err := r.checkForTable(contractAddr, method.Name) - if err != nil { - return false, err - } - if !tableExists { - err = r.newMethodTable(tableID, method) - if err != nil { - return false, err - } - } - - // Add schema name to cache - r.tables.Add(tableID, true) - - return !tableExists, nil -} - -// Creates a table for the given contract and event -func (r *methodRepository) newMethodTable(tableID string, method types.Method) error { - // Begin pg string - pgStr := fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s ", tableID) - pgStr = pgStr + "(id SERIAL, token_name CHARACTER VARYING(66) NOT NULL, block INTEGER NOT NULL," - - // Iterate over method inputs and outputs, using their name and pgType to grow the string - for _, arg := range method.Args { - pgStr = pgStr + fmt.Sprintf(" %s_ %s NOT NULL,", strings.ToLower(arg.Name), arg.PgType) - } - - pgStr = pgStr + fmt.Sprintf(" returned %s NOT NULL)", method.Return[0].PgType) - - _, err := r.DB.Exec(pgStr) - - return err -} - -// Checks if a table already exists for the given contract and event -func (r *methodRepository) checkForTable(contractAddr string, methodName string) (bool, error) { - pgStr := fmt.Sprintf("SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_schema = '%s_%s' AND table_name = '%s_method')", r.mode.String(), strings.ToLower(contractAddr), 
strings.ToLower(methodName)) - var exists bool - err := r.DB.Get(&exists, pgStr) - - return exists, err -} - -// CreateContractSchema checks for contract schema and creates it if it does not already exist -func (r *methodRepository) CreateContractSchema(contractAddr string) (bool, error) { - if contractAddr == "" { - return false, errors.New("error: no contract address specified") - } - - // Check cache before querying pq to see if schema exists - _, ok := r.schemas.Get(contractAddr) - if ok { - return false, nil - } - schemaExists, err := r.checkForSchema(contractAddr) - if err != nil { - return false, err - } - if !schemaExists { - err = r.newContractSchema(contractAddr) - if err != nil { - return false, err - } - } - - // Add schema name to cache - r.schemas.Add(contractAddr, true) - - return !schemaExists, nil -} - -// Creates a schema for the given contract -func (r *methodRepository) newContractSchema(contractAddr string) error { - _, err := r.DB.Exec("CREATE SCHEMA IF NOT EXISTS " + r.mode.String() + "_" + strings.ToLower(contractAddr)) - - return err -} - -// Checks if a schema already exists for the given contract -func (r *methodRepository) checkForSchema(contractAddr string) (bool, error) { - pgStr := fmt.Sprintf("SELECT EXISTS (SELECT schema_name FROM information_schema.schemata WHERE schema_name = '%s_%s')", r.mode.String(), strings.ToLower(contractAddr)) - - var exists bool - err := r.DB.Get(&exists, pgStr) - - return exists, err -} - -// CheckSchemaCache is used to query the schema name cache -func (r *methodRepository) CheckSchemaCache(key string) (interface{}, bool) { - return r.schemas.Get(key) -} - -// CheckTableCache is used to query the table name cache -func (r *methodRepository) CheckTableCache(key string) (interface{}, bool) { - return r.tables.Get(key) -} diff --git a/pkg/eth/contract_watcher/shared/repository/method_repository_test.go b/pkg/eth/contract_watcher/shared/repository/method_repository_test.go deleted file mode 100644 index 
3fc43736..00000000 --- a/pkg/eth/contract_watcher/shared/repository/method_repository_test.go +++ /dev/null @@ -1,240 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package repository_test - -import ( - "fmt" - "strings" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/constants" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/helpers/test_helpers" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/repository" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -var _ = Describe("Repository", func() { - var db *postgres.DB - var dataStore repository.MethodRepository - var con *contract.Contract - var err error - var mockResult types.Result - var method types.Method - - BeforeEach(func() { - con = test_helpers.SetupTusdContract([]string{}, []string{"balanceOf"}) - Expect(len(con.Methods)).To(Equal(1)) - method = con.Methods[0] - mockResult = types.Result{ - Method: method, - PgType: method.Return[0].PgType, - Inputs: make([]interface{}, 1), - Output: new(interface{}), - Block: 6707323, - } - mockResult.Inputs[0] = 
"0xfE9e8709d3215310075d67E3ed32A380CCf451C8" - mockResult.Output = "66386309548896882859581786" - db, _ = test_helpers.SetupDBandBC() - dataStore = repository.NewMethodRepository(db, types.FullSync) - }) - - AfterEach(func() { - test_helpers.TearDown(db) - }) - - Describe("Full Sync Mode", func() { - BeforeEach(func() { - dataStore = repository.NewMethodRepository(db, types.FullSync) - }) - - Describe("CreateContractSchema", func() { - It("Creates schema if it doesn't exist", func() { - created, err := dataStore.CreateContractSchema(constants.TusdContractAddress) - Expect(err).ToNot(HaveOccurred()) - Expect(created).To(Equal(true)) - - created, err = dataStore.CreateContractSchema(constants.TusdContractAddress) - Expect(err).ToNot(HaveOccurred()) - Expect(created).To(Equal(false)) - }) - - It("Caches schema it creates so that it does not need to repeatedly query the database to check for it's existence", func() { - _, ok := dataStore.CheckSchemaCache(con.Address) - Expect(ok).To(Equal(false)) - - created, err := dataStore.CreateContractSchema(con.Address) - Expect(err).ToNot(HaveOccurred()) - Expect(created).To(Equal(true)) - - v, ok := dataStore.CheckSchemaCache(con.Address) - Expect(ok).To(Equal(true)) - Expect(v).To(Equal(true)) - }) - }) - - Describe("CreateMethodTable", func() { - It("Creates table if it doesn't exist", func() { - created, err := dataStore.CreateContractSchema(constants.TusdContractAddress) - Expect(err).ToNot(HaveOccurred()) - Expect(created).To(Equal(true)) - - created, err = dataStore.CreateMethodTable(constants.TusdContractAddress, method) - Expect(err).ToNot(HaveOccurred()) - Expect(created).To(Equal(true)) - - created, err = dataStore.CreateMethodTable(constants.TusdContractAddress, method) - Expect(err).ToNot(HaveOccurred()) - Expect(created).To(Equal(false)) - }) - - It("Caches table it creates so that it does not need to repeatedly query the database to check for it's existence", func() { - created, err := 
dataStore.CreateContractSchema(con.Address) - Expect(err).ToNot(HaveOccurred()) - Expect(created).To(Equal(true)) - - tableID := fmt.Sprintf("%s_%s.%s_method", types.FullSync, strings.ToLower(con.Address), strings.ToLower(method.Name)) - _, ok := dataStore.CheckTableCache(tableID) - Expect(ok).To(Equal(false)) - - created, err = dataStore.CreateMethodTable(con.Address, method) - Expect(err).ToNot(HaveOccurred()) - Expect(created).To(Equal(true)) - - v, ok := dataStore.CheckTableCache(tableID) - Expect(ok).To(Equal(true)) - Expect(v).To(Equal(true)) - }) - }) - - Describe("PersistResult", func() { - It("Persists result from method polling in custom pg table", func() { - err = dataStore.PersistResults([]types.Result{mockResult}, method, con.Address, con.Name) - Expect(err).ToNot(HaveOccurred()) - - scanStruct := test_helpers.BalanceOf{} - - err = db.QueryRowx(fmt.Sprintf("SELECT * FROM full_%s.balanceof_method", constants.TusdContractAddress)).StructScan(&scanStruct) - expectedLog := test_helpers.BalanceOf{ - ID: 1, - TokenName: "TrueUSD", - Block: 6707323, - Address: "0xfE9e8709d3215310075d67E3ed32A380CCf451C8", - Balance: "66386309548896882859581786", - } - Expect(scanStruct).To(Equal(expectedLog)) - }) - - It("Fails with empty result", func() { - err = dataStore.PersistResults([]types.Result{}, method, con.Address, con.Name) - Expect(err).To(HaveOccurred()) - }) - }) - }) - - Describe("Header Sync Mode", func() { - BeforeEach(func() { - dataStore = repository.NewMethodRepository(db, types.HeaderSync) - }) - - Describe("CreateContractSchema", func() { - It("Creates schema if it doesn't exist", func() { - created, err := dataStore.CreateContractSchema(constants.TusdContractAddress) - Expect(err).ToNot(HaveOccurred()) - Expect(created).To(Equal(true)) - - created, err = dataStore.CreateContractSchema(constants.TusdContractAddress) - Expect(err).ToNot(HaveOccurred()) - Expect(created).To(Equal(false)) - }) - - It("Caches schema it creates so that it does not need to 
repeatedly query the database to check for it's existence", func() { - _, ok := dataStore.CheckSchemaCache(con.Address) - Expect(ok).To(Equal(false)) - - created, err := dataStore.CreateContractSchema(con.Address) - Expect(err).ToNot(HaveOccurred()) - Expect(created).To(Equal(true)) - - v, ok := dataStore.CheckSchemaCache(con.Address) - Expect(ok).To(Equal(true)) - Expect(v).To(Equal(true)) - }) - }) - - Describe("CreateMethodTable", func() { - It("Creates table if it doesn't exist", func() { - created, err := dataStore.CreateContractSchema(constants.TusdContractAddress) - Expect(err).ToNot(HaveOccurred()) - Expect(created).To(Equal(true)) - - created, err = dataStore.CreateMethodTable(constants.TusdContractAddress, method) - Expect(err).ToNot(HaveOccurred()) - Expect(created).To(Equal(true)) - - created, err = dataStore.CreateMethodTable(constants.TusdContractAddress, method) - Expect(err).ToNot(HaveOccurred()) - Expect(created).To(Equal(false)) - }) - - It("Caches table it creates so that it does not need to repeatedly query the database to check for it's existence", func() { - created, err := dataStore.CreateContractSchema(con.Address) - Expect(err).ToNot(HaveOccurred()) - Expect(created).To(Equal(true)) - - tableID := fmt.Sprintf("%s_%s.%s_method", types.HeaderSync, strings.ToLower(con.Address), strings.ToLower(method.Name)) - _, ok := dataStore.CheckTableCache(tableID) - Expect(ok).To(Equal(false)) - - created, err = dataStore.CreateMethodTable(con.Address, method) - Expect(err).ToNot(HaveOccurred()) - Expect(created).To(Equal(true)) - - v, ok := dataStore.CheckTableCache(tableID) - Expect(ok).To(Equal(true)) - Expect(v).To(Equal(true)) - }) - }) - - Describe("PersistResult", func() { - It("Persists result from method polling in custom pg table for header sync mode vDB", func() { - err = dataStore.PersistResults([]types.Result{mockResult}, method, con.Address, con.Name) - Expect(err).ToNot(HaveOccurred()) - - scanStruct := test_helpers.BalanceOf{} - - err = 
db.QueryRowx(fmt.Sprintf("SELECT * FROM header_%s.balanceof_method", constants.TusdContractAddress)).StructScan(&scanStruct) - expectedLog := test_helpers.BalanceOf{ - ID: 1, - TokenName: "TrueUSD", - Block: 6707323, - Address: "0xfE9e8709d3215310075d67E3ed32A380CCf451C8", - Balance: "66386309548896882859581786", - } - Expect(scanStruct).To(Equal(expectedLog)) - }) - - It("Fails with empty result", func() { - err = dataStore.PersistResults([]types.Result{}, method, con.Address, con.Name) - Expect(err).To(HaveOccurred()) - }) - }) - }) -}) diff --git a/pkg/eth/contract_watcher/shared/repository/repository_suite_test.go b/pkg/eth/contract_watcher/shared/repository/repository_suite_test.go deleted file mode 100644 index 707e1917..00000000 --- a/pkg/eth/contract_watcher/shared/repository/repository_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package repository_test - -import ( - "io/ioutil" - "testing" - - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - "github.com/sirupsen/logrus" -) - -func TestRepository(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Shared Repository Suite Test") -} - -var _ = BeforeSuite(func() { - logrus.SetOutput(ioutil.Discard) -}) diff --git a/pkg/eth/contract_watcher/shared/retriever/address_retriever.go b/pkg/eth/contract_watcher/shared/retriever/address_retriever.go deleted file mode 100644 index f24340eb..00000000 --- a/pkg/eth/contract_watcher/shared/retriever/address_retriever.go +++ /dev/null @@ -1,121 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package retriever - -import ( - "fmt" - "strings" - - "github.com/ethereum/go-ethereum/accounts/abi" - "github.com/ethereum/go-ethereum/common" - - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -// AddressRetriever is used to retrieve the addresses associated with a contract -type AddressRetriever interface { - RetrieveTokenHolderAddresses(info contract.Contract) (map[common.Address]bool, error) -} - -type addressRetriever struct { - db *postgres.DB - mode types.Mode -} - -// NewAddressRetriever returns a new AddressRetriever -func NewAddressRetriever(db *postgres.DB, mode types.Mode) AddressRetriever { - return &addressRetriever{ - db: db, - mode: mode, - } -} - -// RetrieveTokenHolderAddresses is used to retrieve list of token-holding/contract-related addresses by iterating over available events -// This generic method should work whether or not the argument/input names of the events meet the expected standard -// This could be generalized to iterate over ALL events and pull out any address arguments -func (r *addressRetriever) RetrieveTokenHolderAddresses(info contract.Contract) (map[common.Address]bool, error) { - addrList := make([]string, 0) - - _, ok := info.Filters["Transfer"] - if ok { - addrs, err := r.retrieveTransferAddresses(info) - if err != nil { - return nil, err - } - addrList = append(addrList, addrs...) - } - - _, ok = info.Filters["Mint"] - if ok { - addrs, err := r.retrieveTokenMintees(info) - if err != nil { - return nil, err - } - addrList = append(addrList, addrs...) 
- } - - contractAddresses := make(map[common.Address]bool) - for _, addr := range addrList { - contractAddresses[common.HexToAddress(addr)] = true - } - - return contractAddresses, nil -} - -func (r *addressRetriever) retrieveTransferAddresses(con contract.Contract) ([]string, error) { - transferAddrs := make([]string, 0) - event := con.Events["Transfer"] - - for _, field := range event.Fields { // Iterate over event fields, finding the ones with address type - - if field.Type.T == abi.AddressTy { // If they have address type, retrieve those addresses - addrs := make([]string, 0) - pgStr := fmt.Sprintf("SELECT %s_ FROM %s_%s.%s_event", strings.ToLower(field.Name), r.mode.String(), strings.ToLower(con.Address), strings.ToLower(event.Name)) - err := r.db.Select(&addrs, pgStr) - if err != nil { - return []string{}, err - } - - transferAddrs = append(transferAddrs, addrs...) // And append them to the growing list - } - } - - return transferAddrs, nil -} - -func (r *addressRetriever) retrieveTokenMintees(con contract.Contract) ([]string, error) { - mintAddrs := make([]string, 0) - event := con.Events["Mint"] - - for _, field := range event.Fields { // Iterate over event fields, finding the ones with address type - - if field.Type.T == abi.AddressTy { // If they have address type, retrieve those addresses - addrs := make([]string, 0) - pgStr := fmt.Sprintf("SELECT %s_ FROM %s_%s.%s_event", strings.ToLower(field.Name), r.mode.String(), strings.ToLower(con.Address), strings.ToLower(event.Name)) - err := r.db.Select(&addrs, pgStr) - if err != nil { - return []string{}, err - } - - mintAddrs = append(mintAddrs, addrs...) 
// And append them to the growing list - } - } - - return mintAddrs, nil -} diff --git a/pkg/eth/contract_watcher/shared/types/event.go b/pkg/eth/contract_watcher/shared/types/event.go deleted file mode 100644 index d5474828..00000000 --- a/pkg/eth/contract_watcher/shared/types/event.go +++ /dev/null @@ -1,99 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package types - -import ( - "fmt" - "strings" - - "github.com/ethereum/go-ethereum/accounts/abi" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/crypto" -) - -// Event is our custom event type -type Event struct { - Name string - Anonymous bool - Fields []Field -} - -// Field is our custom event field type which associates a postgres type with the field -type Field struct { - abi.Argument // Name, Type, Indexed - PgType string // Holds type used when committing data held in this field to postgres -} - -// Log is used to hold instance of an event log data -type Log struct { - ID int64 // VulcanizeIdLog for full sync and header ID for header sync contract watcher - Values map[string]string // Map of event input names to their values - - // Used for full sync only - Block int64 - Tx string - - // Used for headerSync only - LogIndex uint - TransactionIndex uint - Raw []byte // json.Unmarshalled byte array of geth/core/types.Log{} -} - -// NewEvent unpacks abi.Event into our custom Event struct -func NewEvent(e abi.Event) Event { - fields := make([]Field, len(e.Inputs)) - for i, input := range e.Inputs { - fields[i] = Field{} - fields[i].Name = input.Name - fields[i].Type = input.Type - fields[i].Indexed = input.Indexed - // Fill in pg type based on abi type - switch fields[i].Type.T { - case abi.HashTy, abi.AddressTy: - fields[i].PgType = "CHARACTER VARYING(66)" - case abi.IntTy, abi.UintTy: - fields[i].PgType = "NUMERIC" - case abi.BoolTy: - fields[i].PgType = "BOOLEAN" - case abi.BytesTy, abi.FixedBytesTy: - fields[i].PgType = "BYTEA" - case abi.ArrayTy: - fields[i].PgType = "TEXT[]" - case abi.FixedPointTy: - fields[i].PgType = "MONEY" // use shopspring/decimal for fixed point numbers in go and money type in postgres? 
- default: - fields[i].PgType = "TEXT" - } - } - - return Event{ - Name: e.Name, - Anonymous: e.Anonymous, - Fields: fields, - } -} - -// Sig returns the hash signature for an event -func (e Event) Sig() common.Hash { - types := make([]string, len(e.Fields)) - - for i, input := range e.Fields { - types[i] = input.Type.String() - } - - return crypto.Keccak256Hash([]byte(fmt.Sprintf("%v(%v)", e.Name, strings.Join(types, ",")))) -} diff --git a/pkg/eth/contract_watcher/shared/types/method.go b/pkg/eth/contract_watcher/shared/types/method.go deleted file mode 100644 index 293b3594..00000000 --- a/pkg/eth/contract_watcher/shared/types/method.go +++ /dev/null @@ -1,113 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package types - -import ( - "fmt" - "strings" - - "github.com/ethereum/go-ethereum/accounts/abi" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/crypto" -) - -// Method is our custom method struct -type Method struct { - Name string - Const bool - Args []Field - Return []Field -} - -// Result is used to hold instance of result from method call with given inputs and block -type Result struct { - Method - Inputs []interface{} // Will only use addresses - Output interface{} - PgType string // Holds output pg type - Block int64 -} - -// NewMethod unpacks abi.Method into our custom Method struct -func NewMethod(m abi.Method) Method { - inputs := make([]Field, len(m.Inputs)) - for i, input := range m.Inputs { - inputs[i] = Field{} - inputs[i].Name = input.Name - inputs[i].Type = input.Type - inputs[i].Indexed = input.Indexed - switch inputs[i].Type.T { - case abi.HashTy, abi.AddressTy: - inputs[i].PgType = "CHARACTER VARYING(66)" - case abi.IntTy, abi.UintTy: - inputs[i].PgType = "NUMERIC" - case abi.BoolTy: - inputs[i].PgType = "BOOLEAN" - case abi.BytesTy, abi.FixedBytesTy: - inputs[i].PgType = "BYTEA" - case abi.ArrayTy: - inputs[i].PgType = "TEXT[]" - case abi.FixedPointTy: - inputs[i].PgType = "MONEY" // use shopspring/decimal for fixed point numbers in go and money type in postgres? 
- default: - inputs[i].PgType = "TEXT" - } - } - - outputs := make([]Field, len(m.Outputs)) - for i, output := range m.Outputs { - outputs[i] = Field{} - outputs[i].Name = output.Name - outputs[i].Type = output.Type - outputs[i].Indexed = output.Indexed - switch outputs[i].Type.T { - case abi.HashTy, abi.AddressTy: - outputs[i].PgType = "CHARACTER VARYING(66)" - case abi.IntTy, abi.UintTy: - outputs[i].PgType = "NUMERIC" - case abi.BoolTy: - outputs[i].PgType = "BOOLEAN" - case abi.BytesTy, abi.FixedBytesTy: - outputs[i].PgType = "BYTEA" - case abi.ArrayTy: - outputs[i].PgType = "TEXT[]" - case abi.FixedPointTy: - outputs[i].PgType = "MONEY" // use shopspring/decimal for fixed point numbers in go and money type in postgres? - default: - outputs[i].PgType = "TEXT" - } - } - - return Method{ - Name: m.Name, - Const: m.Const, - Args: inputs, - Return: outputs, - } -} - -// Sig returns the hash signature for the method -func (m Method) Sig() common.Hash { - types := make([]string, len(m.Args)) - i := 0 - for _, arg := range m.Args { - types[i] = arg.Type.String() - i++ - } - - return crypto.Keccak256Hash([]byte(fmt.Sprintf("%v(%v)", m.Name, strings.Join(types, ",")))) -} diff --git a/pkg/eth/contract_watcher/shared/types/mode.go b/pkg/eth/contract_watcher/shared/types/mode.go deleted file mode 100644 index 667e66f2..00000000 --- a/pkg/eth/contract_watcher/shared/types/mode.go +++ /dev/null @@ -1,43 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package types - -// Mode is used to explicitly represent the operating mode of the transformer -type Mode int - -// Mode enums -const ( - HeaderSync Mode = iota - FullSync -) - -// IsValid returns true is the Mode is valid -func (mode Mode) IsValid() bool { - return mode >= HeaderSync && mode <= FullSync -} - -// String returns the string representation of the mode -func (mode Mode) String() string { - switch mode { - case HeaderSync: - return "header" - case FullSync: - return "full" - default: - return "unknown" - } -} diff --git a/pkg/super_node/eth/converter.go b/pkg/eth/converter.go similarity index 98% rename from pkg/super_node/eth/converter.go rename to pkg/eth/converter.go index ca2b587f..24d567b0 100644 --- a/pkg/super_node/eth/converter.go +++ b/pkg/eth/converter.go @@ -26,7 +26,7 @@ import ( "github.com/ethereum/go-ethereum/rlp" "github.com/ethereum/go-ethereum/statediff" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // PayloadConverter satisfies the PayloadConverter interface for ethereum diff --git a/pkg/super_node/eth/converter_test.go b/pkg/eth/converter_test.go similarity index 95% rename from pkg/super_node/eth/converter_test.go rename to pkg/eth/converter_test.go index 6c4f94bf..830bee76 100644 --- a/pkg/super_node/eth/converter_test.go +++ b/pkg/eth/converter_test.go @@ -22,8 +22,8 @@ import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth/mocks" ) var _ = Describe("Converter", func() { diff --git a/pkg/eth/converters/cold_db/transaction_converter.go b/pkg/eth/converters/cold_db/transaction_converter.go deleted file mode 100644 index 16cf4ccb..00000000 --- a/pkg/eth/converters/cold_db/transaction_converter.go +++ /dev/null @@ -1,88 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package cold_db - -import ( - "strings" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "golang.org/x/sync/errgroup" -) - -type ColdDbTransactionConverter struct{} - -func NewColdDbTransactionConverter() *ColdDbTransactionConverter { - return &ColdDbTransactionConverter{} -} - -func (cdtc *ColdDbTransactionConverter) ConvertBlockTransactionsToCore(gethBlock *types.Block) ([]core.TransactionModel, error) { - var g errgroup.Group - coreTransactions := make([]core.TransactionModel, len(gethBlock.Transactions())) - - for gethTransactionIndex, gethTransaction := range gethBlock.Transactions() { - transaction := gethTransaction - transactionIndex := uint(gethTransactionIndex) - g.Go(func() error { - signer := getSigner(transaction) - sender, err := signer.Sender(transaction) - if err != nil { - return err - } - coreTransaction := transToCoreTrans(transaction, &sender) - coreTransactions[transactionIndex] = coreTransaction - return nil - }) - } - if err := g.Wait(); err != nil { - return coreTransactions, err - } - return coreTransactions, nil -} - -func (cdtc *ColdDbTransactionConverter) ConvertRPCTransactionsToModels(transactions []core.RPCTransaction) ([]core.TransactionModel, error) { - panic("converting transaction indexes to integer not supported for cold import") -} - -func getSigner(tx *types.Transaction) types.Signer { - v, _, _ := tx.RawSignatureValues() - if v.Sign() != 0 && tx.Protected() { - return types.NewEIP155Signer(tx.ChainId()) - } - return types.HomesteadSigner{} -} - -func transToCoreTrans(transaction *types.Transaction, from *common.Address) core.TransactionModel { - return core.TransactionModel{ - Hash: transaction.Hash().Hex(), - Nonce: transaction.Nonce(), - To: strings.ToLower(addressToHex(transaction.To())), - From: strings.ToLower(addressToHex(from)), - GasLimit: transaction.Gas(), - GasPrice: transaction.GasPrice().Int64(), - Value: 
transaction.Value().String(), - Data: transaction.Data(), - } -} - -func addressToHex(to *common.Address) string { - if to == nil { - return "" - } - return to.Hex() -} diff --git a/pkg/eth/converters/common/block_converter.go b/pkg/eth/converters/common/block_converter.go deleted file mode 100644 index 35be241b..00000000 --- a/pkg/eth/converters/common/block_converter.go +++ /dev/null @@ -1,91 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package common - -import ( - "encoding/json" - "math/big" - "strconv" - "strings" - - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/ethereum/go-ethereum/core/types" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type BlockConverter struct { - transactionConverter TransactionConverter -} - -func NewBlockConverter(transactionConverter TransactionConverter) BlockConverter { - return BlockConverter{transactionConverter: transactionConverter} -} - -func (bc BlockConverter) ToCoreBlock(gethBlock *types.Block) (core.Block, error) { - transactions, err := bc.transactionConverter.ConvertBlockTransactionsToCore(gethBlock) - if err != nil { - return core.Block{}, err - } - - coreBlock := core.Block{ - Difficulty: gethBlock.Difficulty().Int64(), - ExtraData: hexutil.Encode(gethBlock.Extra()), - GasLimit: gethBlock.GasLimit(), - GasUsed: gethBlock.GasUsed(), - Hash: gethBlock.Hash().Hex(), - Miner: strings.ToLower(gethBlock.Coinbase().Hex()), - Nonce: hexutil.Encode(gethBlock.Header().Nonce[:]), - Number: gethBlock.Number().Int64(), - ParentHash: gethBlock.ParentHash().Hex(), - Size: gethBlock.Size().String(), - Time: gethBlock.Time(), - Transactions: transactions, - UncleHash: gethBlock.UncleHash().Hex(), - } - coreBlock.Reward = CalcBlockReward(coreBlock, gethBlock.Uncles()).String() - totalUncleReward, uncles := bc.ToCoreUncle(coreBlock, gethBlock.Uncles()) - - coreBlock.UnclesReward = totalUncleReward.String() - coreBlock.Uncles = uncles - return coreBlock, nil -} - -// Rewards for the miners of uncles is calculated as (U_n + 8 - B_n) * R / 8 -// Where U_n is the uncle block number, B_n is the parent block number and R is the static block reward at B_n -// https://github.com/ethereum/go-ethereum/issues/1591 -// https://ethereum.stackexchange.com/questions/27172/different-uncles-reward -// https://github.com/ethereum/homestead-guide/issues/399 -// Returns the total uncle reward and the individual processed uncles -func (bc BlockConverter) 
ToCoreUncle(block core.Block, uncles []*types.Header) (*big.Int, []core.Uncle) { - totalUncleRewards := new(big.Int) - coreUncles := make([]core.Uncle, 0, len(uncles)) - for _, uncle := range uncles { - thisUncleReward := calcUncleMinerReward(block.Number, uncle.Number.Int64()) - raw, _ := json.Marshal(uncle) - coreUncle := core.Uncle{ - Miner: uncle.Coinbase.Hex(), - Hash: uncle.Hash().Hex(), - Raw: raw, - Reward: thisUncleReward.String(), - Timestamp: strconv.FormatUint(uncle.Time, 10), - } - coreUncles = append(coreUncles, coreUncle) - totalUncleRewards.Add(totalUncleRewards, thisUncleReward) - } - return totalUncleRewards, coreUncles -} diff --git a/pkg/eth/converters/common/block_converter_test.go b/pkg/eth/converters/common/block_converter_test.go deleted file mode 100644 index ed73f95a..00000000 --- a/pkg/eth/converters/common/block_converter_test.go +++ /dev/null @@ -1,393 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package common_test - -import ( - "bytes" - "io/ioutil" - "log" - "math/big" - "os" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/ethereum/go-ethereum/core/types" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - vulcCommon "github.com/vulcanize/vulcanizedb/pkg/eth/converters/common" - "github.com/vulcanize/vulcanizedb/pkg/eth/converters/rpc" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" -) - -var _ = Describe("Conversion of GethBlock to core.Block", func() { - - It("converts basic Block metadata", func() { - difficulty := big.NewInt(1) - gasLimit := uint64(100000) - gasUsed := uint64(100000) - miner := common.HexToAddress("0x0000000000000000000000000000000000000123") - extraData, _ := hexutil.Decode("0xe4b883e5bda9e7a59ee4bb99e9b1bc") - nonce := types.BlockNonce{10} - number := int64(1) - time := uint64(140000000) - - header := types.Header{ - Difficulty: difficulty, - GasLimit: uint64(gasLimit), - GasUsed: uint64(gasUsed), - Extra: extraData, - Coinbase: miner, - Nonce: nonce, - Number: big.NewInt(number), - ParentHash: common.Hash{64}, - Time: time, - UncleHash: common.Hash{128}, - } - block := types.NewBlock(&header, []*types.Transaction{}, []*types.Header{}, []*types.Receipt{}) - client := fakes.NewMockEthClient() - transactionConverter := rpc.NewRPCTransactionConverter(client) - blockConverter := vulcCommon.NewBlockConverter(transactionConverter) - - coreBlock, err := blockConverter.ToCoreBlock(block) - - Expect(err).ToNot(HaveOccurred()) - Expect(coreBlock.Difficulty).To(Equal(difficulty.Int64())) - Expect(coreBlock.GasLimit).To(Equal(gasLimit)) - Expect(coreBlock.Miner).To(Equal(miner.Hex())) - Expect(coreBlock.GasUsed).To(Equal(gasUsed)) - Expect(coreBlock.Hash).To(Equal(block.Hash().Hex())) - Expect(coreBlock.Nonce).To(Equal(hexutil.Encode(header.Nonce[:]))) - Expect(coreBlock.Number).To(Equal(number)) - Expect(coreBlock.ParentHash).To(Equal(block.ParentHash().Hex())) - Expect(coreBlock.ExtraData).To(Equal(hexutil.Encode(block.Extra()))) - Expect(coreBlock.Size).To(Equal(block.Size().String())) - Expect(coreBlock.Time).To(Equal(time)) - Expect(coreBlock.UncleHash).To(Equal(block.UncleHash().Hex())) - 
Expect(coreBlock.IsFinal).To(BeFalse()) - }) - - Describe("The block and uncle rewards calculations", func() { - It("calculates block rewards for a block", func() { - transaction := types.NewTransaction( - uint64(226823), - common.HexToAddress("0x108fedb097c1dcfed441480170144d8e19bb217f"), - big.NewInt(1080900090000000000), - uint64(90000), - big.NewInt(50000000000), - []byte{}, - ) - transactions := []*types.Transaction{transaction} - - txHash := transaction.Hash() - receipt := types.Receipt{ - TxHash: txHash, - GasUsed: uint64(21000), - CumulativeGasUsed: uint64(21000), - } - receipts := []*types.Receipt{&receipt} - - client := fakes.NewMockEthClient() - client.SetTransactionReceipts(receipts) - - number := int64(1071819) - header := types.Header{ - Number: big.NewInt(number), - } - uncles := []*types.Header{{Number: big.NewInt(1071817)}, {Number: big.NewInt(1071818)}} - block := types.NewBlock(&header, transactions, uncles, []*types.Receipt{&receipt}) - transactionConverter := rpc.NewRPCTransactionConverter(client) - blockConverter := vulcCommon.NewBlockConverter(transactionConverter) - - coreBlock, err := blockConverter.ToCoreBlock(block) - Expect(err).ToNot(HaveOccurred()) - - expectedBlockReward := new(big.Int) - expectedBlockReward.SetString("5313550000000000000", 10) - - blockReward := vulcCommon.CalcBlockReward(coreBlock, block.Uncles()) - Expect(blockReward.String()).To(Equal(expectedBlockReward.String())) - }) - - It("calculates the uncles reward for a block", func() { - transaction := types.NewTransaction( - uint64(226823), - common.HexToAddress("0x108fedb097c1dcfed441480170144d8e19bb217f"), - big.NewInt(1080900090000000000), - uint64(90000), - big.NewInt(50000000000), - []byte{}) - transactions := []*types.Transaction{transaction} - - receipt := types.Receipt{ - TxHash: transaction.Hash(), - GasUsed: uint64(21000), - CumulativeGasUsed: uint64(21000), - } - receipts := []*types.Receipt{&receipt} - - header := types.Header{ - Number: 
big.NewInt(int64(1071819)), - } - uncles := []*types.Header{ - {Number: big.NewInt(1071816)}, - {Number: big.NewInt(1071817)}, - } - block := types.NewBlock(&header, transactions, uncles, receipts) - - client := fakes.NewMockEthClient() - client.SetTransactionReceipts(receipts) - transactionConverter := rpc.NewRPCTransactionConverter(client) - blockConverter := vulcCommon.NewBlockConverter(transactionConverter) - - coreBlock, err := blockConverter.ToCoreBlock(block) - Expect(err).ToNot(HaveOccurred()) - - expectedTotalReward := new(big.Int) - expectedTotalReward.SetString("6875000000000000000", 10) - totalReward, coreUncles := blockConverter.ToCoreUncle(coreBlock, block.Uncles()) - Expect(totalReward.String()).To(Equal(expectedTotalReward.String())) - - Expect(len(coreUncles)).To(Equal(2)) - Expect(coreUncles[0].Reward).To(Equal("3125000000000000000")) - Expect(coreUncles[0].Miner).To(Equal("0x0000000000000000000000000000000000000000")) - Expect(coreUncles[0].Hash).To(Equal("0xb629de4014b6e30cf9555ee833f1806fa0d8b8516fde194405f9c98c2deb8772")) - Expect(coreUncles[1].Reward).To(Equal("3750000000000000000")) - Expect(coreUncles[1].Miner).To(Equal("0x0000000000000000000000000000000000000000")) - Expect(coreUncles[1].Hash).To(Equal("0x673f5231e4888a951e0bc8a25b5774b982e6e9e258362c21affaff6e02dd5a2b")) - }) - - It("decreases the static block reward from 5 to 3 for blocks after block 4,269,999", func() { - transactionOne := types.NewTransaction( - uint64(8072), - common.HexToAddress("0xebd17720aeb7ac5186c5dfa7bafeb0bb14c02551 "), - big.NewInt(0), - uint64(500000), - big.NewInt(42000000000), - []byte{}, - ) - - transactionTwo := types.NewTransaction(uint64(8071), - common.HexToAddress("0x3cdab63d764c8c5048ed5e8f0a4e95534ba7e1ea"), - big.NewInt(0), - uint64(500000), - big.NewInt(42000000000), - []byte{}) - - transactions := []*types.Transaction{transactionOne, transactionTwo} - - receiptOne := types.Receipt{ - TxHash: transactionOne.Hash(), - GasUsed: uint64(297508), - 
CumulativeGasUsed: uint64(0), - } - receiptTwo := types.Receipt{ - TxHash: transactionTwo.Hash(), - GasUsed: uint64(297508), - CumulativeGasUsed: uint64(0), - } - receipts := []*types.Receipt{&receiptOne, &receiptTwo} - - number := int64(4370055) - header := types.Header{ - Number: big.NewInt(number), - } - var uncles []*types.Header - block := types.NewBlock(&header, transactions, uncles, receipts) - - client := fakes.NewMockEthClient() - client.SetTransactionReceipts(receipts) - transactionConverter := rpc.NewRPCTransactionConverter(client) - blockConverter := vulcCommon.NewBlockConverter(transactionConverter) - - coreBlock, err := blockConverter.ToCoreBlock(block) - Expect(err).ToNot(HaveOccurred()) - - expectedRewards := new(big.Int) - expectedRewards.SetString("3024990672000000000", 10) - rewards := vulcCommon.CalcBlockReward(coreBlock, block.Uncles()) - Expect(rewards.String()).To(Equal(expectedRewards.String())) - }) - }) - - Describe("the converted transactions", func() { - It("is empty", func() { - header := types.Header{} - block := types.NewBlock(&header, []*types.Transaction{}, []*types.Header{}, []*types.Receipt{}) - client := fakes.NewMockEthClient() - transactionConverter := rpc.NewRPCTransactionConverter(client) - blockConverter := vulcCommon.NewBlockConverter(transactionConverter) - - coreBlock, err := blockConverter.ToCoreBlock(block) - - Expect(err).ToNot(HaveOccurred()) - Expect(len(coreBlock.Transactions)).To(Equal(0)) - }) - - It("converts a single transaction", func() { - gethTransaction := types.NewTransaction( - uint64(10000), common.Address{1}, - big.NewInt(10), - uint64(5000), - big.NewInt(3), - hexutil.MustDecode("0xf7d8c8830000000000000000000000000000000000000000000000000000000000037788000000000000000000000000000000000000000000000000000000000003bd14"), - ) - var rawTransaction bytes.Buffer - encodeErr := gethTransaction.EncodeRLP(&rawTransaction) - Expect(encodeErr).NotTo(HaveOccurred()) - - gethReceipt := &types.Receipt{ - Bloom: 
types.BytesToBloom(hexutil.MustDecode("0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")), - ContractAddress: common.HexToAddress("x123"), - CumulativeGasUsed: uint64(7996119), - GasUsed: uint64(21000), - Logs: []*types.Log{}, - Status: uint64(1), - TxHash: gethTransaction.Hash(), - } - - client := fakes.NewMockEthClient() - client.SetTransactionReceipts([]*types.Receipt{gethReceipt}) - - header := types.Header{} - block := types.NewBlock( - &header, - []*types.Transaction{gethTransaction}, - []*types.Header{}, - []*types.Receipt{gethReceipt}, - ) - transactionConverter := rpc.NewRPCTransactionConverter(client) - blockConverter := vulcCommon.NewBlockConverter(transactionConverter) - - coreBlock, err := blockConverter.ToCoreBlock(block) - - Expect(err).ToNot(HaveOccurred()) - Expect(len(coreBlock.Transactions)).To(Equal(1)) - coreTransaction := coreBlock.Transactions[0] - expectedData := common.FromHex("0xf7d8c8830000000000000000000000000000000000000000000000000000000000037788000000000000000000000000000000000000000000000000000000000003bd14") - Expect(coreTransaction.Data).To(Equal(expectedData)) - Expect(coreTransaction.To).To(Equal(gethTransaction.To().Hex())) - Expect(coreTransaction.From).To(Equal("0x0000000000000000000000000000000000000123")) - Expect(coreTransaction.GasLimit).To(Equal(gethTransaction.Gas())) - Expect(coreTransaction.GasPrice).To(Equal(gethTransaction.GasPrice().Int64())) - Expect(coreTransaction.Raw).To(Equal(rawTransaction.Bytes())) - Expect(coreTransaction.TxIndex).To(Equal(int64(0))) 
- Expect(coreTransaction.Value).To(Equal(gethTransaction.Value().String())) - Expect(coreTransaction.Nonce).To(Equal(gethTransaction.Nonce())) - - coreReceipt := coreTransaction.Receipt - expectedReceipt, err := vulcCommon.ToCoreReceipt(gethReceipt) - Expect(err).ToNot(HaveOccurred()) - Expect(coreReceipt).To(Equal(expectedReceipt)) - }) - - It("has an empty 'To' field when transaction creates a new contract", func() { - gethTransaction := types.NewContractCreation( - uint64(10000), - big.NewInt(10), - uint64(5000), - big.NewInt(3), - []byte("1234"), - ) - - gethReceipt := &types.Receipt{ - CumulativeGasUsed: uint64(1), - GasUsed: uint64(1), - TxHash: gethTransaction.Hash(), - ContractAddress: common.HexToAddress("0x1023342345"), - } - - client := fakes.NewMockEthClient() - client.SetTransactionReceipts([]*types.Receipt{gethReceipt}) - - block := types.NewBlock( - &types.Header{}, - []*types.Transaction{gethTransaction}, - []*types.Header{}, - []*types.Receipt{gethReceipt}, - ) - transactionConverter := rpc.NewRPCTransactionConverter(client) - blockConverter := vulcCommon.NewBlockConverter(transactionConverter) - - coreBlock, err := blockConverter.ToCoreBlock(block) - - Expect(err).ToNot(HaveOccurred()) - coreTransaction := coreBlock.Transactions[0] - Expect(coreTransaction.To).To(Equal("")) - - coreReceipt := coreTransaction.Receipt - expectedReceipt, err := vulcCommon.ToCoreReceipt(gethReceipt) - Expect(err).ToNot(HaveOccurred()) - Expect(coreReceipt).To(Equal(expectedReceipt)) - }) - }) - - Describe("transaction error handling", func() { - var gethTransaction *types.Transaction - var gethReceipt *types.Receipt - var header *types.Header - var block *types.Block - - BeforeEach(func() { - log.SetOutput(ioutil.Discard) - gethTransaction = types.NewTransaction( - uint64(0), - common.Address{}, - big.NewInt(0), - uint64(0), - big.NewInt(0), - []byte{}, - ) - gethReceipt = &types.Receipt{} - header = &types.Header{} - block = types.NewBlock( - header, - 
[]*types.Transaction{gethTransaction}, - []*types.Header{}, - []*types.Receipt{gethReceipt}, - ) - - }) - - AfterEach(func() { - defer log.SetOutput(os.Stdout) - }) - - It("returns an error when transaction sender call fails", func() { - client := fakes.NewMockEthClient() - client.SetTransactionSenderErr(fakes.FakeError) - transactionConverter := rpc.NewRPCTransactionConverter(client) - blockConverter := vulcCommon.NewBlockConverter(transactionConverter) - - _, err := blockConverter.ToCoreBlock(block) - - Expect(err).To(MatchError(fakes.FakeError)) - }) - - It("returns an error when transaction receipt call fails", func() { - client := fakes.NewMockEthClient() - client.SetTransactionReceiptErr(fakes.FakeError) - transactionConverter := rpc.NewRPCTransactionConverter(client) - blockConverter := vulcCommon.NewBlockConverter(transactionConverter) - - _, err := blockConverter.ToCoreBlock(block) - - Expect(err).To(MatchError(fakes.FakeError)) - }) - }) - -}) diff --git a/pkg/eth/converters/common/common_suite_test.go b/pkg/eth/converters/common/common_suite_test.go deleted file mode 100644 index 06af9ce1..00000000 --- a/pkg/eth/converters/common/common_suite_test.go +++ /dev/null @@ -1,29 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package common_test - -import ( - "testing" - - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" -) - -func TestCommon(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Common Suite") -} diff --git a/pkg/eth/converters/common/full_sync_log_converter.go b/pkg/eth/converters/common/full_sync_log_converter.go deleted file mode 100644 index c7dc1871..00000000 --- a/pkg/eth/converters/common/full_sync_log_converter.go +++ /dev/null @@ -1,57 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package common - -import ( - "strings" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/ethereum/go-ethereum/core/types" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -func ToFullSyncLogs(gethLogs []types.Log) []core.FullSyncLog { - var logs []core.FullSyncLog - for _, log := range gethLogs { - log := ToCoreLog(log) - logs = append(logs, log) - } - return logs -} - -func makeTopics(topics []common.Hash) core.Topics { - var hexTopics core.Topics - for i, topic := range topics { - hexTopics[i] = topic.Hex() - } - return hexTopics -} - -func ToCoreLog(gethLog types.Log) core.FullSyncLog { - topics := gethLog.Topics - hexTopics := makeTopics(topics) - return core.FullSyncLog{ - Address: strings.ToLower(gethLog.Address.Hex()), - BlockNumber: int64(gethLog.BlockNumber), - Topics: hexTopics, - TxHash: gethLog.TxHash.Hex(), - Index: int64(gethLog.Index), - Data: hexutil.Encode(gethLog.Data), - } -} diff --git a/pkg/eth/converters/common/full_sync_log_converter_test.go b/pkg/eth/converters/common/full_sync_log_converter_test.go deleted file mode 100644 index 6ac4a0d7..00000000 --- a/pkg/eth/converters/common/full_sync_log_converter_test.go +++ /dev/null @@ -1,110 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package common_test - -import ( - "strings" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/ethereum/go-ethereum/core/types" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - vulcCommon "github.com/vulcanize/vulcanizedb/pkg/eth/converters/common" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -var _ = Describe("Conversion of GethLog to core.FullSyncLog", func() { - - It("converts geth log to internal log format", func() { - gethLog := types.Log{ - Address: common.HexToAddress("0x448a5065aeBB8E423F0896E6c5D525C040f59af3"), - BlockHash: common.HexToHash("0x656c34545f90a730a19008c0e7a7cd4fb3895064b48d6d69761bd5abad681056"), - BlockNumber: 2019236, - Data: hexutil.MustDecode("0x000000000000000000000000000000000000000000000001a055690d9db80000"), - Index: 2, - TxIndex: 3, - TxHash: common.HexToHash("0x3b198bfd5d2907285af009e9ae84a0ecd63677110d89d7e030251acb87f6487e"), - Topics: []common.Hash{ - common.HexToHash("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"), - common.HexToHash("0x00000000000000000000000080b2c9d7cbbf30a1b0fc8983c647d754c6525615"), - }, - } - - expected := core.FullSyncLog{ - Address: strings.ToLower(gethLog.Address.Hex()), - BlockNumber: int64(gethLog.BlockNumber), - Data: hexutil.Encode(gethLog.Data), - TxHash: gethLog.TxHash.Hex(), - Index: 2, - Topics: core.Topics{ - gethLog.Topics[0].Hex(), - gethLog.Topics[1].Hex(), - }, - } - - coreLog := vulcCommon.ToCoreLog(gethLog) - - Expect(coreLog.Address).To(Equal(expected.Address)) - Expect(coreLog.BlockNumber).To(Equal(expected.BlockNumber)) - Expect(coreLog.Data).To(Equal(expected.Data)) - Expect(coreLog.Index).To(Equal(expected.Index)) - Expect(coreLog.Topics[0]).To(Equal(expected.Topics[0])) - Expect(coreLog.Topics[1]).To(Equal(expected.Topics[1])) - Expect(coreLog.TxHash).To(Equal(expected.TxHash)) - }) - - It("converts geth log array to array of internal logs", func() { - gethLogOne := types.Log{ 
- Address: common.HexToAddress("0xecf8f87f810ecf450940c9f60066b4a7a501d6a7"), - BlockHash: common.HexToHash("0x656c34545f90a730a19008c0e7a7cd4fb3895064b48d6d69761bd5abad681056"), - BlockNumber: 2019236, - Data: hexutil.MustDecode("0x000000000000000000000000000000000000000000000001a055690d9db80000"), - Index: 2, - TxIndex: 3, - TxHash: common.HexToHash("0x3b198bfd5d2907285af009e9ae84a0ecd63677110d89d7e030251acb87f6487e"), - Topics: []common.Hash{ - common.HexToHash("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"), - common.HexToHash("0x00000000000000000000000080b2c9d7cbbf30a1b0fc8983c647d754c6525615"), - }, - } - - gethLogTwo := types.Log{ - Address: common.HexToAddress("0x123"), - BlockHash: common.HexToHash("0x576"), - BlockNumber: 2019236, - Data: hexutil.MustDecode("0x0000000000000000000000000000000000000000000000000000000000000001"), - Index: 3, - TxIndex: 4, - TxHash: common.HexToHash("0x134"), - Topics: []common.Hash{ - common.HexToHash("0xaaa"), - common.HexToHash("0xbbb"), - }, - } - - expectedOne := vulcCommon.ToCoreLog(gethLogOne) - expectedTwo := vulcCommon.ToCoreLog(gethLogTwo) - - coreLogs := vulcCommon.ToFullSyncLogs([]types.Log{gethLogOne, gethLogTwo}) - - Expect(len(coreLogs)).To(Equal(2)) - Expect(coreLogs[0]).To(Equal(expectedOne)) - Expect(coreLogs[1]).To(Equal(expectedTwo)) - }) -}) diff --git a/pkg/eth/converters/common/header_converter.go b/pkg/eth/converters/common/header_converter.go deleted file mode 100644 index 0f2e0ff6..00000000 --- a/pkg/eth/converters/common/header_converter.go +++ /dev/null @@ -1,42 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package common - -import ( - "encoding/json" - "strconv" - - "github.com/ethereum/go-ethereum/core/types" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type HeaderConverter struct{} - -func (converter HeaderConverter) Convert(gethHeader *types.Header, blockHash string) core.Header { - rawHeader, err := json.Marshal(gethHeader) - if err != nil { - panic(err) - } - coreHeader := core.Header{ - Hash: blockHash, - BlockNumber: gethHeader.Number.Int64(), - Raw: rawHeader, - Timestamp: strconv.FormatUint(gethHeader.Time, 10), - } - return coreHeader -} diff --git a/pkg/eth/converters/common/header_converter_test.go b/pkg/eth/converters/common/header_converter_test.go deleted file mode 100644 index c87d2083..00000000 --- a/pkg/eth/converters/common/header_converter_test.go +++ /dev/null @@ -1,65 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package common_test - -import ( - "encoding/json" - "math/big" - "strconv" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - common2 "github.com/vulcanize/vulcanizedb/pkg/eth/converters/common" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" -) - -var _ = Describe("Block header converter", func() { - It("converts geth header to core header", func() { - gethHeader := &types.Header{ - Difficulty: big.NewInt(1), - Number: big.NewInt(2), - ParentHash: common.HexToHash("0xParent"), - ReceiptHash: common.HexToHash("0xReceipt"), - Root: common.HexToHash("0xRoot"), - Time: uint64(123456789), - TxHash: common.HexToHash("0xTransaction"), - UncleHash: common.HexToHash("0xUncle"), - } - converter := common2.HeaderConverter{} - hash := fakes.FakeHash.String() - - coreHeader := converter.Convert(gethHeader, hash) - - Expect(coreHeader.BlockNumber).To(Equal(gethHeader.Number.Int64())) - Expect(coreHeader.Hash).To(Equal(hash)) - Expect(coreHeader.Timestamp).To(Equal(strconv.FormatUint(gethHeader.Time, 10))) - }) - - It("includes raw bytes for header as JSON", func() { - gethHeader := types.Header{Number: big.NewInt(123)} - converter := common2.HeaderConverter{} - - coreHeader := converter.Convert(&gethHeader, fakes.FakeHash.String()) - - expectedJSON, err := json.Marshal(gethHeader) - Expect(err).NotTo(HaveOccurred()) - Expect(coreHeader.Raw).To(Equal(expectedJSON)) - }) -}) diff --git a/pkg/eth/converters/common/receipt_converter.go b/pkg/eth/converters/common/receipt_converter.go deleted file mode 100644 index 1db8942a..00000000 --- a/pkg/eth/converters/common/receipt_converter.go +++ /dev/null @@ -1,89 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the 
License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package common - -import ( - "bytes" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/ethereum/go-ethereum/core/types" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -func ToCoreReceipts(gethReceipts types.Receipts) ([]core.Receipt, error) { - var coreReceipts []core.Receipt - for _, receipt := range gethReceipts { - coreReceipt, err := ToCoreReceipt(receipt) - if err != nil { - return nil, err - } - coreReceipts = append(coreReceipts, coreReceipt) - } - return coreReceipts, nil -} - -func ToCoreReceipt(gethReceipt *types.Receipt) (core.Receipt, error) { - bloom := hexutil.Encode(gethReceipt.Bloom.Bytes()) - var postState string - var status int - postState, status = postStateOrStatus(gethReceipt) - logs := dereferenceLogs(gethReceipt) - contractAddress := setContractAddress(gethReceipt) - - rlpBuff := new(bytes.Buffer) - receiptForStorage := types.ReceiptForStorage(*gethReceipt) - err := receiptForStorage.EncodeRLP(rlpBuff) - if err != nil { - return core.Receipt{}, err - } - return core.Receipt{ - Bloom: bloom, - ContractAddress: contractAddress, - CumulativeGasUsed: gethReceipt.CumulativeGasUsed, - GasUsed: gethReceipt.GasUsed, - Logs: logs, - StateRoot: postState, - TxHash: gethReceipt.TxHash.Hex(), - Status: status, - Rlp: rlpBuff.Bytes(), - }, nil -} - -func setContractAddress(gethReceipt *types.Receipt) string { - emptyAddress := common.Address{}.Bytes() - if bytes.Equal(gethReceipt.ContractAddress.Bytes(), emptyAddress) { - return "" - } - return 
gethReceipt.ContractAddress.Hex() -} - -func dereferenceLogs(gethReceipt *types.Receipt) []core.FullSyncLog { - logs := []core.FullSyncLog{} - for _, log := range gethReceipt.Logs { - logs = append(logs, ToCoreLog(*log)) - } - return logs -} - -func postStateOrStatus(gethReceipts *types.Receipt) (string, int) { - if len(gethReceipts.PostState) != 0 { - return hexutil.Encode(gethReceipts.PostState), -99 - } - return "", int(gethReceipts.Status) -} diff --git a/pkg/eth/converters/common/receipt_converter_test.go b/pkg/eth/converters/common/receipt_converter_test.go deleted file mode 100644 index 42830326..00000000 --- a/pkg/eth/converters/common/receipt_converter_test.go +++ /dev/null @@ -1,115 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package common_test - -import ( - "bytes" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/ethereum/go-ethereum/core/types" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - vulcCommon "github.com/vulcanize/vulcanizedb/pkg/eth/converters/common" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -var _ = Describe("Conversion of GethReceipt to core.Receipt", func() { - - It(`converts geth receipt to internal receipt format (pre Byzantium has post-transaction stateroot)`, func() { - receipt := types.Receipt{ - Bloom: types.BytesToBloom(hexutil.MustDecode("0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")), - ContractAddress: common.Address{}, - CumulativeGasUsed: uint64(25000), - GasUsed: uint64(21000), - Logs: []*types.Log{}, - PostState: hexutil.MustDecode("0x88abf7e73128227370aa7baa3dd4e18d0af70e92ef1f9ef426942fbe2dddb733"), - TxHash: common.HexToHash("0x97d99bc7729211111a21b12c933c949d4f31684f1d6954ff477d0477538ff017"), - } - - rlpBuff := new(bytes.Buffer) - receiptForStorage := types.ReceiptForStorage(receipt) - err := receiptForStorage.EncodeRLP(rlpBuff) - Expect(err).ToNot(HaveOccurred()) - - expected := core.Receipt{ - Bloom: "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - 
ContractAddress: "", - CumulativeGasUsed: 25000, - GasUsed: 21000, - Logs: []core.FullSyncLog{}, - StateRoot: "0x88abf7e73128227370aa7baa3dd4e18d0af70e92ef1f9ef426942fbe2dddb733", - Status: -99, - TxHash: receipt.TxHash.Hex(), - Rlp: rlpBuff.Bytes(), - } - - coreReceipt, err := vulcCommon.ToCoreReceipt(&receipt) - Expect(err).ToNot(HaveOccurred()) - Expect(coreReceipt.Bloom).To(Equal(expected.Bloom)) - Expect(coreReceipt.ContractAddress).To(Equal(expected.ContractAddress)) - Expect(coreReceipt.CumulativeGasUsed).To(Equal(expected.CumulativeGasUsed)) - Expect(coreReceipt.GasUsed).To(Equal(expected.GasUsed)) - Expect(coreReceipt.Logs).To(Equal(expected.Logs)) - Expect(coreReceipt.StateRoot).To(Equal(expected.StateRoot)) - Expect(coreReceipt.Status).To(Equal(expected.Status)) - Expect(coreReceipt.TxHash).To(Equal(expected.TxHash)) - Expect(bytes.Compare(coreReceipt.Rlp, expected.Rlp)).To(Equal(0)) - }) - - It("converts geth receipt to internal receipt format (post Byzantium has status", func() { - receipt := types.Receipt{ - Bloom: types.BytesToBloom(hexutil.MustDecode("0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")), - ContractAddress: common.HexToAddress("x0123"), - CumulativeGasUsed: uint64(7996119), - GasUsed: uint64(21000), - Logs: []*types.Log{}, - Status: uint64(1), - TxHash: common.HexToHash("0xe340558980f89d5f86045ac11e5cc34e4bcec20f9f1e2a427aa39d87114e8223"), - } - - rlpBuff := new(bytes.Buffer) - receiptForStorage := types.ReceiptForStorage(receipt) - err := receiptForStorage.EncodeRLP(rlpBuff) - 
Expect(err).ToNot(HaveOccurred()) - - expected := core.Receipt{ - Bloom: "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - ContractAddress: receipt.ContractAddress.Hex(), - CumulativeGasUsed: 7996119, - GasUsed: 21000, - Logs: []core.FullSyncLog{}, - StateRoot: "", - Status: 1, - TxHash: receipt.TxHash.Hex(), - Rlp: rlpBuff.Bytes(), - } - - coreReceipt, err := vulcCommon.ToCoreReceipt(&receipt) - Expect(err).ToNot(HaveOccurred()) - Expect(coreReceipt.Bloom).To(Equal(expected.Bloom)) - Expect(coreReceipt.ContractAddress).To(Equal("")) - Expect(coreReceipt.CumulativeGasUsed).To(Equal(expected.CumulativeGasUsed)) - Expect(coreReceipt.GasUsed).To(Equal(expected.GasUsed)) - Expect(coreReceipt.Logs).To(Equal(expected.Logs)) - Expect(coreReceipt.StateRoot).To(Equal(expected.StateRoot)) - Expect(coreReceipt.Status).To(Equal(expected.Status)) - Expect(coreReceipt.TxHash).To(Equal(expected.TxHash)) - Expect(bytes.Compare(coreReceipt.Rlp, expected.Rlp)).To(Equal(0)) - }) -}) diff --git a/pkg/eth/converters/common/transaction_converter.go b/pkg/eth/converters/common/transaction_converter.go deleted file mode 100644 index 8d66dd3a..00000000 --- a/pkg/eth/converters/common/transaction_converter.go +++ /dev/null @@ -1,27 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) 
any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package common - -import ( - "github.com/ethereum/go-ethereum/core/types" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type TransactionConverter interface { - ConvertBlockTransactionsToCore(gethBlock *types.Block) ([]core.TransactionModel, error) - ConvertRPCTransactionsToModels(transactions []core.RPCTransaction) ([]core.TransactionModel, error) -} diff --git a/pkg/eth/converters/rpc/rpc_suite_test.go b/pkg/eth/converters/rpc/rpc_suite_test.go deleted file mode 100644 index 346968fa..00000000 --- a/pkg/eth/converters/rpc/rpc_suite_test.go +++ /dev/null @@ -1,13 +0,0 @@ -package rpc_test - -import ( - "testing" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" -) - -func TestRpc(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Rpc Suite") -} diff --git a/pkg/eth/converters/rpc/transaction_converter.go b/pkg/eth/converters/rpc/transaction_converter.go deleted file mode 100644 index b851f06a..00000000 --- a/pkg/eth/converters/rpc/transaction_converter.go +++ /dev/null @@ -1,225 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package rpc - -import ( - "bytes" - "context" - "fmt" - "log" - "math/big" - "strings" - - "github.com/ethereum/go-ethereum/common/hexutil" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/rlp" - "golang.org/x/sync/errgroup" - - vulcCommon "github.com/vulcanize/vulcanizedb/pkg/eth/converters/common" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type RPCTransactionConverter struct { - client core.EthClient -} - -// raw transaction data, required for generating RLP -type transactionData struct { - AccountNonce uint64 - Price *big.Int - GasLimit uint64 - Recipient *common.Address `rlp:"nil"` // nil means contract creation - Amount *big.Int - Payload []byte - V *big.Int - R *big.Int - S *big.Int -} - -func NewRPCTransactionConverter(client core.EthClient) *RPCTransactionConverter { - return &RPCTransactionConverter{client: client} -} - -func (converter *RPCTransactionConverter) ConvertRPCTransactionsToModels(transactions []core.RPCTransaction) ([]core.TransactionModel, error) { - var results []core.TransactionModel - for _, transaction := range transactions { - txData, convertErr := getTransactionData(transaction) - if convertErr != nil { - return nil, convertErr - } - txRLP, rlpErr := getTransactionRLP(txData) - if rlpErr != nil { - return nil, rlpErr - } - txIndex, txIndexErr := hexToBigInt(transaction.TransactionIndex) - if txIndexErr != nil { - return nil, txIndexErr - } - transactionModel := core.TransactionModel{ - Data: txData.Payload, - From: transaction.From, - GasLimit: txData.GasLimit, - GasPrice: txData.Price.Int64(), - Hash: transaction.Hash, - Nonce: txData.AccountNonce, - Raw: txRLP, - // NOTE: Header Sync transactions don't include receipt; would require separate RPC call - To: 
transaction.Recipient, - TxIndex: txIndex.Int64(), - Value: txData.Amount.String(), - } - results = append(results, transactionModel) - } - return results, nil -} - -func (converter *RPCTransactionConverter) ConvertBlockTransactionsToCore(gethBlock *types.Block) ([]core.TransactionModel, error) { - var g errgroup.Group - coreTransactions := make([]core.TransactionModel, len(gethBlock.Transactions())) - - for gethTransactionIndex, gethTransaction := range gethBlock.Transactions() { - //https://golang.org/doc/faq#closures_and_goroutines - transaction := gethTransaction - transactionIndex := uint(gethTransactionIndex) - g.Go(func() error { - from, err := converter.client.TransactionSender(context.Background(), transaction, gethBlock.Hash(), transactionIndex) - if err != nil { - log.Println("transaction sender: ", err) - return err - } - coreTransaction, convertErr := convertGethTransactionToModel(transaction, &from, int64(gethTransactionIndex)) - if convertErr != nil { - return convertErr - } - coreTransaction, err = converter.appendReceiptToTransaction(coreTransaction) - if err != nil { - log.Println("receipt: ", err) - return err - } - coreTransactions[transactionIndex] = coreTransaction - return nil - }) - } - if err := g.Wait(); err != nil { - log.Println("transactions: ", err) - return coreTransactions, err - } - return coreTransactions, nil -} - -func (converter *RPCTransactionConverter) appendReceiptToTransaction(transaction core.TransactionModel) (core.TransactionModel, error) { - gethReceipt, err := converter.client.TransactionReceipt(context.Background(), common.HexToHash(transaction.Hash)) - if err != nil { - return transaction, err - } - receipt, err := vulcCommon.ToCoreReceipt(gethReceipt) - if err != nil { - return transaction, err - } - transaction.Receipt = receipt - return transaction, nil -} - -func convertGethTransactionToModel(transaction *types.Transaction, from *common.Address, transactionIndex int64) (core.TransactionModel, error) { - raw := 
bytes.Buffer{} - encodeErr := transaction.EncodeRLP(&raw) - if encodeErr != nil { - return core.TransactionModel{}, encodeErr - } - return core.TransactionModel{ - Data: transaction.Data(), - From: strings.ToLower(addressToHex(from)), - GasLimit: transaction.Gas(), - GasPrice: transaction.GasPrice().Int64(), - Hash: transaction.Hash().Hex(), - Nonce: transaction.Nonce(), - Raw: raw.Bytes(), - To: strings.ToLower(addressToHex(transaction.To())), - TxIndex: transactionIndex, - Value: transaction.Value().String(), - }, nil -} - -func getTransactionData(transaction core.RPCTransaction) (transactionData, error) { - nonce, nonceErr := hexToBigInt(transaction.Nonce) - if nonceErr != nil { - return transactionData{}, nonceErr - } - gasPrice, gasPriceErr := hexToBigInt(transaction.GasPrice) - if gasPriceErr != nil { - return transactionData{}, gasPriceErr - } - gasLimit, gasLimitErr := hexToBigInt(transaction.GasLimit) - if gasLimitErr != nil { - return transactionData{}, gasLimitErr - } - recipient := common.HexToAddress(transaction.Recipient) - amount, amountErr := hexToBigInt(transaction.Amount) - if amountErr != nil { - return transactionData{}, amountErr - } - v, vErr := hexToBigInt(transaction.V) - if vErr != nil { - return transactionData{}, vErr - } - r, rErr := hexToBigInt(transaction.R) - if rErr != nil { - return transactionData{}, rErr - } - s, sErr := hexToBigInt(transaction.S) - if sErr != nil { - return transactionData{}, sErr - } - return transactionData{ - AccountNonce: nonce.Uint64(), - Price: gasPrice, - GasLimit: gasLimit.Uint64(), - Recipient: &recipient, - Amount: amount, - Payload: hexutil.MustDecode(transaction.Payload), - V: v, - R: r, - S: s, - }, nil -} - -func getTransactionRLP(txData transactionData) ([]byte, error) { - transactionRlp := bytes.Buffer{} - encodeErr := rlp.Encode(&transactionRlp, txData) - if encodeErr != nil { - return nil, encodeErr - } - return transactionRlp.Bytes(), nil -} - -func addressToHex(to *common.Address) string { - 
if to == nil { - return "" - } - return to.Hex() -} - -func hexToBigInt(hex string) (*big.Int, error) { - result := big.NewInt(0) - _, scanErr := fmt.Sscan(hex, result) - if scanErr != nil { - return nil, scanErr - } - return result, nil -} diff --git a/pkg/eth/converters/rpc/transaction_converter_test.go b/pkg/eth/converters/rpc/transaction_converter_test.go deleted file mode 100644 index 004abbaa..00000000 --- a/pkg/eth/converters/rpc/transaction_converter_test.go +++ /dev/null @@ -1,124 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package rpc_test - -import ( - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/eth/converters/rpc" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" -) - -var _ = Describe("RPC transaction converter", func() { - var converter rpc.RPCTransactionConverter - - BeforeEach(func() { - converter = rpc.RPCTransactionConverter{} - }) - - It("converts hex fields to integers", func() { - rpcTransaction := getFakeRpcTransaction("0x1") - - transactionModels, err := converter.ConvertRPCTransactionsToModels([]core.RPCTransaction{rpcTransaction}) - - Expect(err).NotTo(HaveOccurred()) - Expect(len(transactionModels)).To(Equal(1)) - Expect(transactionModels[0].GasLimit).To(Equal(uint64(1))) - Expect(transactionModels[0].GasPrice).To(Equal(int64(1))) - Expect(transactionModels[0].Nonce).To(Equal(uint64(1))) - Expect(transactionModels[0].TxIndex).To(Equal(int64(1))) - Expect(transactionModels[0].Value).To(Equal("1")) - }) - - It("returns error if invalid hex cannot be converted", func() { - invalidTransaction := getFakeRpcTransaction("invalid") - - _, err := converter.ConvertRPCTransactionsToModels([]core.RPCTransaction{invalidTransaction}) - - Expect(err).To(HaveOccurred()) - }) - - It("copies RPC transaction hash, from, and to values to model", func() { - rpcTransaction := getFakeRpcTransaction("0x1") - - transactionModels, err := converter.ConvertRPCTransactionsToModels([]core.RPCTransaction{rpcTransaction}) - - Expect(err).NotTo(HaveOccurred()) - Expect(len(transactionModels)).To(Equal(1)) - Expect(transactionModels[0].Hash).To(Equal(rpcTransaction.Hash)) - Expect(transactionModels[0].From).To(Equal(rpcTransaction.From)) - Expect(transactionModels[0].To).To(Equal(rpcTransaction.Recipient)) - }) - - It("derives transaction RLP", func() { - // actual transaction: https://kovan.etherscan.io/tx/0x3b29ef265425d304069c57e5145cd1c7558568b06d231775f50a693bee1aad4f - rpcTransaction := core.RPCTransaction{ - Nonce: "0x7aa9", - GasPrice: "0x3b9aca00", - 
GasLimit: "0x7a120", - Recipient: "0xf88bbdc1e2718f8857f30a180076ec38d53cf296", - Amount: "0x0", - Payload: "0x18178358", - V: "0x78", - R: "0x79f6a78ababfdb37b87a4d52795a49b08b5b5171443d1f2fb8f373431e77439c", - S: "0x3f1a210dd3b59d161735a314b88568fa91552dfe207c00a2fdbcd52ccb081409", - Hash: "0x3b29ef265425d304069c57e5145cd1c7558568b06d231775f50a693bee1aad4f", - From: "0x694032e172d9b0ee6aff5d36749bad4947a36e4e", - TransactionIndex: "0xa", - } - - transactionModels, err := converter.ConvertRPCTransactionsToModels([]core.RPCTransaction{rpcTransaction}) - - Expect(err).NotTo(HaveOccurred()) - Expect(len(transactionModels)).To(Equal(1)) - model := transactionModels[0] - expectedRLP := []byte{248, 106, 130, 122, 169, 132, 59, 154, 202, 0, 131, 7, 161, 32, 148, 248, 139, 189, 193, - 226, 113, 143, 136, 87, 243, 10, 24, 0, 118, 236, 56, 213, 60, 242, 150, 128, 132, 24, 23, 131, 88, 120, 160, - 121, 246, 167, 138, 186, 191, 219, 55, 184, 122, 77, 82, 121, 90, 73, 176, 139, 91, 81, 113, 68, 61, 31, 47, - 184, 243, 115, 67, 30, 119, 67, 156, 160, 63, 26, 33, 13, 211, 181, 157, 22, 23, 53, 163, 20, 184, 133, 104, - 250, 145, 85, 45, 254, 32, 124, 0, 162, 253, 188, 213, 44, 203, 8, 20, 9} - Expect(model.Raw).To(Equal(expectedRLP)) - }) - - It("does not include transaction receipt", func() { - rpcTransaction := getFakeRpcTransaction("0x1") - - transactionModels, err := converter.ConvertRPCTransactionsToModels([]core.RPCTransaction{rpcTransaction}) - - Expect(err).NotTo(HaveOccurred()) - Expect(len(transactionModels)).To(Equal(1)) - Expect(transactionModels[0].Receipt).To(Equal(core.Receipt{})) - }) -}) - -func getFakeRpcTransaction(hex string) core.RPCTransaction { - return core.RPCTransaction{ - Hash: "0x2", - Amount: hex, - GasLimit: hex, - GasPrice: hex, - Nonce: hex, - From: fakes.FakeAddress.Hex(), - Recipient: fakes.FakeAddress.Hex(), - V: "0x2", - R: "0x2", - S: "0x2", - Payload: "0x12", - TransactionIndex: hex, - } -} diff --git a/pkg/eth/core/block.go 
b/pkg/eth/core/block.go deleted file mode 100644 index 1c378272..00000000 --- a/pkg/eth/core/block.go +++ /dev/null @@ -1,37 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package core - -type Block struct { - Reward string `db:"reward"` - Difficulty int64 `db:"difficulty"` - ExtraData string `db:"extra_data"` - GasLimit uint64 `db:"gas_limit"` - GasUsed uint64 `db:"gas_used"` - Hash string `db:"hash"` - IsFinal bool `db:"is_final"` - Miner string `db:"miner"` - Nonce string `db:"nonce"` - Number int64 `db:"number"` - ParentHash string `db:"parent_hash"` - Size string `db:"size"` - Time uint64 `db:"time"` - Transactions []TransactionModel - UncleHash string `db:"uncle_hash"` - UnclesReward string `db:"uncles_reward"` - Uncles []Uncle -} diff --git a/pkg/eth/core/blockchain.go b/pkg/eth/core/blockchain.go deleted file mode 100644 index b657c34f..00000000 --- a/pkg/eth/core/blockchain.go +++ /dev/null @@ -1,47 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package core - -import ( - "math/big" - - "github.com/ethereum/go-ethereum/common" - - "github.com/ethereum/go-ethereum" - "github.com/ethereum/go-ethereum/core/types" -) - -type BlockChain interface { - ContractDataFetcher - AccountDataFetcher - GetBlockByNumber(blockNumber int64) (Block, error) - GetEthLogsWithCustomQuery(query ethereum.FilterQuery) ([]types.Log, error) - GetHeaderByNumber(blockNumber int64) (Header, error) - GetHeadersByNumbers(blockNumbers []int64) ([]Header, error) - GetFullSyncLogs(contract Contract, startingBlockNumber *big.Int, endingBlockNumber *big.Int) ([]FullSyncLog, error) - GetTransactions(transactionHashes []common.Hash) ([]TransactionModel, error) - LastBlock() (*big.Int, error) - Node() Node -} - -type ContractDataFetcher interface { - FetchContractData(abiJSON string, address string, method string, methodArgs []interface{}, result interface{}, blockNumber int64) error -} - -type AccountDataFetcher interface { - GetAccountBalance(address common.Address, blockNumber *big.Int) (*big.Int, error) -} diff --git a/pkg/eth/core/contract.go b/pkg/eth/core/contract.go deleted file mode 100644 index 48646dbf..00000000 --- a/pkg/eth/core/contract.go +++ /dev/null @@ -1,23 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package core - -type Contract struct { - Abi string - Hash string - Transactions []TransactionModel -} diff --git a/pkg/eth/core/header.go b/pkg/eth/core/header.go deleted file mode 100644 index 73aa220e..00000000 --- a/pkg/eth/core/header.go +++ /dev/null @@ -1,48 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package core - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/ethereum/go-ethereum/core/types" -) - -type Header struct { - ID int64 - BlockNumber int64 `db:"block_number"` - Hash string - Raw []byte - Timestamp string `db:"block_timestamp"` -} - -type POAHeader struct { - ParentHash common.Hash `json:"parentHash" gencodec:"required"` - UncleHash common.Hash `json:"sha3Uncles" gencodec:"required"` - Coinbase common.Address `json:"miner" gencodec:"required"` - Root common.Hash `json:"stateRoot" gencodec:"required"` - TxHash common.Hash `json:"transactionsRoot" gencodec:"required"` - ReceiptHash common.Hash `json:"receiptsRoot" gencodec:"required"` - Bloom types.Bloom `json:"logsBloom" gencodec:"required"` - Difficulty *hexutil.Big `json:"difficulty" gencodec:"required"` - Number *hexutil.Big `json:"number" gencodec:"required"` - GasLimit hexutil.Uint64 `json:"gasLimit" gencodec:"required"` - GasUsed hexutil.Uint64 `json:"gasUsed" gencodec:"required"` - Time hexutil.Uint64 `json:"timestamp" gencodec:"required"` - Extra hexutil.Bytes `json:"extraData" gencodec:"required"` - Hash common.Hash `json:"hash"` -} diff --git a/pkg/eth/core/log.go b/pkg/eth/core/log.go deleted file mode 100644 index 7e21ffc3..00000000 --- a/pkg/eth/core/log.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package core - -import "github.com/ethereum/go-ethereum/core/types" - -type FullSyncLog struct { - BlockNumber int64 - TxHash string - Address string - Topics - Index int64 - Data string -} - -type HeaderSyncLog struct { - ID int64 - HeaderID int64 `db:"header_id"` - Log types.Log - Transformed bool -} diff --git a/pkg/eth/core/receipts.go b/pkg/eth/core/receipts.go deleted file mode 100644 index cf9ff5d5..00000000 --- a/pkg/eth/core/receipts.go +++ /dev/null @@ -1,29 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package core - -type Receipt struct { - Bloom string - ContractAddress string `db:"contract_address"` - CumulativeGasUsed uint64 `db:"cumulative_gas_used"` - GasUsed uint64 `db:"gas_used"` - Logs []FullSyncLog - StateRoot string `db:"state_root"` - Status int - TxHash string `db:"tx_hash"` - Rlp []byte `db:"rlp"` -} diff --git a/pkg/eth/core/topics.go b/pkg/eth/core/topics.go deleted file mode 100644 index d1e2f11a..00000000 --- a/pkg/eth/core/topics.go +++ /dev/null @@ -1,19 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package core - -type Topics [4]string diff --git a/pkg/eth/core/transaction.go b/pkg/eth/core/transaction.go deleted file mode 100644 index 89c6e3cb..00000000 --- a/pkg/eth/core/transaction.go +++ /dev/null @@ -1,46 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package core - -type TransactionModel struct { - Data []byte `db:"input_data"` - From string `db:"tx_from"` - GasLimit uint64 `db:"gas_limit"` - GasPrice int64 `db:"gas_price"` - Hash string - Nonce uint64 - Raw []byte `db:"raw"` - Receipt - To string `db:"tx_to"` - TxIndex int64 `db:"tx_index"` - Value string -} - -type RPCTransaction struct { - Nonce string `json:"nonce"` - GasPrice string `json:"gasPrice"` - GasLimit string `json:"gas"` - Recipient string `json:"to"` - Amount string `json:"value"` - Payload string `json:"input"` - V string `json:"v"` - R string `json:"r"` - S string `json:"s"` - Hash string - From string - TransactionIndex string `json:"transactionIndex"` -} diff --git a/pkg/eth/core/uncle.go b/pkg/eth/core/uncle.go deleted file mode 100644 index 044e712d..00000000 --- a/pkg/eth/core/uncle.go +++ /dev/null @@ -1,26 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package core - -type Uncle struct { - ID int64 - Miner string - Reward string - Hash string - Timestamp string `db:"block_timestamp"` - Raw []byte -} diff --git a/pkg/eth/core/watched_event_log.go b/pkg/eth/core/watched_event_log.go deleted file mode 100644 index 695bb94e..00000000 --- a/pkg/eth/core/watched_event_log.go +++ /dev/null @@ -1,31 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package core - -type WatchedEvent struct { - LogID int64 `json:"log_id" db:"id"` - Name string `json:"name"` - BlockNumber int64 `json:"block_number" db:"block_number"` - Address string `json:"address"` - TxHash string `json:"tx_hash" db:"tx_hash"` - Index int64 `json:"index"` - Topic0 string `json:"topic0"` - Topic1 string `json:"topic1"` - Topic2 string `json:"topic2"` - Topic3 string `json:"topic3"` - Data string `json:"data"` -} diff --git a/pkg/eth/crypto/crypto_suite_test.go b/pkg/eth/crypto/crypto_suite_test.go deleted file mode 100644 index 25e15381..00000000 --- a/pkg/eth/crypto/crypto_suite_test.go +++ /dev/null @@ -1,29 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package crypto_test - -import ( - "testing" - - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" -) - -func TestCrypto(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Crypto Suite") -} diff --git a/pkg/eth/crypto/parser.go b/pkg/eth/crypto/parser.go deleted file mode 100644 index bf65cb7f..00000000 --- a/pkg/eth/crypto/parser.go +++ /dev/null @@ -1,37 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package crypto - -import ( - "github.com/ethereum/go-ethereum/crypto" - "github.com/ethereum/go-ethereum/p2p/discv5" -) - -type PublicKeyParser interface { - ParsePublicKey(privateKey string) (string, error) -} - -type EthPublicKeyParser struct{} - -func (EthPublicKeyParser) ParsePublicKey(privateKey string) (string, error) { - np, err := crypto.HexToECDSA(privateKey) - if err != nil { - return "", err - } - pubKey := discv5.PubkeyID(&np.PublicKey) - return pubKey.String(), nil -} diff --git a/pkg/eth/crypto/parser_test.go b/pkg/eth/crypto/parser_test.go deleted file mode 100644 index 73a148af..00000000 --- a/pkg/eth/crypto/parser_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package crypto_test - -import ( - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/eth/crypto" -) - -var _ = Describe("Public key parser", func() { - It("parses public key from private key", func() { - privKey := "0000000000000000000000000000000000000000000000000000000000000001" - parser := crypto.EthPublicKeyParser{} - - pubKey, err := parser.ParsePublicKey(privKey) - - Expect(err).NotTo(HaveOccurred()) - Expect(pubKey).To(Equal("79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8")) - }) -}) diff --git a/pkg/eth/datastore/errors.go b/pkg/eth/datastore/errors.go deleted file mode 100644 index 97da92e5..00000000 --- a/pkg/eth/datastore/errors.go +++ /dev/null @@ -1,19 +0,0 @@ -package datastore - -import "fmt" - -func ErrBlockDoesNotExist(blockNumber int64) error { - return fmt.Errorf("Block number %d does not exist", blockNumber) -} - -func ErrContractDoesNotExist(contractHash string) error { - return fmt.Errorf("Contract %v does not exist", contractHash) -} - -func ErrFilterDoesNotExist(name string) error { - return fmt.Errorf("filter %s does not exist", name) -} - -func ErrReceiptDoesNotExist(txHash string) error { - return fmt.Errorf("Receipt for tx: %v does not exist", txHash) -} diff --git a/pkg/eth/datastore/ethereum/config.go b/pkg/eth/datastore/ethereum/config.go deleted file mode 100644 index ffc463ab..00000000 --- a/pkg/eth/datastore/ethereum/config.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free 
software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package ethereum - -type DatabaseType int - -const ( - Level DatabaseType = iota -) - -type DatabaseConfig struct { - Type DatabaseType - Path string -} - -func CreateDatabaseConfig(dbType DatabaseType, path string) DatabaseConfig { - return DatabaseConfig{ - Type: dbType, - Path: path, - } -} diff --git a/pkg/eth/datastore/ethereum/database.go b/pkg/eth/datastore/ethereum/database.go deleted file mode 100644 index 801e6047..00000000 --- a/pkg/eth/datastore/ethereum/database.go +++ /dev/null @@ -1,50 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package ethereum - -import ( - "fmt" - - "github.com/ethereum/go-ethereum/core/rawdb" - "github.com/sirupsen/logrus" - - "github.com/ethereum/go-ethereum/core/types" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/ethereum/level" -) - -type Database interface { - GetBlock(hash []byte, blockNumber int64) *types.Block - GetBlockHash(blockNumber int64) []byte - GetBlockReceipts(blockHash []byte, blockNumber int64) types.Receipts - GetHeadBlockNumber() int64 -} - -func CreateDatabase(config DatabaseConfig) (Database, error) { - switch config.Type { - case Level: - levelDBConnection, err := rawdb.NewLevelDBDatabase(config.Path, 128, 1024, "vdb") - if err != nil { - logrus.Error("CreateDatabase: error connecting to new LDBD: ", err) - return nil, err - } - levelDBReader := level.NewLevelDatabaseReader(levelDBConnection) - levelDB := level.NewLevelDatabase(levelDBReader) - return levelDB, nil - default: - return nil, fmt.Errorf("Unknown ethereum database: %s", config.Path) - } -} diff --git a/pkg/eth/datastore/ethereum/level/database.go b/pkg/eth/datastore/ethereum/level/database.go deleted file mode 100644 index a05d95b7..00000000 --- a/pkg/eth/datastore/ethereum/level/database.go +++ /dev/null @@ -1,56 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package level - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" -) - -type LevelDatabase struct { - reader Reader -} - -func NewLevelDatabase(ldbReader Reader) *LevelDatabase { - return &LevelDatabase{ - reader: ldbReader, - } -} - -func (l LevelDatabase) GetBlock(blockHash []byte, blockNumber int64) *types.Block { - n := uint64(blockNumber) - h := common.BytesToHash(blockHash) - return l.reader.GetBlock(h, n) -} - -func (l LevelDatabase) GetBlockHash(blockNumber int64) []byte { - n := uint64(blockNumber) - h := l.reader.GetCanonicalHash(n) - return h.Bytes() -} - -func (l LevelDatabase) GetBlockReceipts(blockHash []byte, blockNumber int64) types.Receipts { - n := uint64(blockNumber) - h := common.BytesToHash(blockHash) - return l.reader.GetBlockReceipts(h, n) -} - -func (l LevelDatabase) GetHeadBlockNumber() int64 { - h := l.reader.GetHeadBlockHash() - n := l.reader.GetBlockNumber(h) - return int64(*n) -} diff --git a/pkg/eth/datastore/ethereum/level/database_reader.go b/pkg/eth/datastore/ethereum/level/database_reader.go deleted file mode 100644 index 9c91fdef..00000000 --- a/pkg/eth/datastore/ethereum/level/database_reader.go +++ /dev/null @@ -1,61 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package level - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/rawdb" - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/ethdb" - "github.com/ethereum/go-ethereum/params" -) - -type Reader interface { - GetBlock(hash common.Hash, number uint64) *types.Block - GetBlockNumber(hash common.Hash) *uint64 - GetBlockReceipts(hash common.Hash, number uint64) types.Receipts - GetCanonicalHash(number uint64) common.Hash - GetHeadBlockHash() common.Hash -} - -type LevelDatabaseReader struct { - reader ethdb.Reader -} - -func NewLevelDatabaseReader(reader ethdb.Reader) *LevelDatabaseReader { - return &LevelDatabaseReader{reader: reader} -} - -func (ldbr *LevelDatabaseReader) GetBlock(hash common.Hash, number uint64) *types.Block { - return rawdb.ReadBlock(ldbr.reader, hash, number) -} - -func (ldbr *LevelDatabaseReader) GetBlockNumber(hash common.Hash) *uint64 { - return rawdb.ReadHeaderNumber(ldbr.reader, hash) -} - -func (ldbr *LevelDatabaseReader) GetBlockReceipts(hash common.Hash, number uint64) types.Receipts { - return rawdb.ReadReceipts(ldbr.reader, hash, number, ¶ms.ChainConfig{}) -} - -func (ldbr *LevelDatabaseReader) GetCanonicalHash(number uint64) common.Hash { - return rawdb.ReadCanonicalHash(ldbr.reader, number) -} - -func (ldbr *LevelDatabaseReader) GetHeadBlockHash() common.Hash { - return rawdb.ReadHeadBlockHash(ldbr.reader) -} diff --git a/pkg/eth/datastore/ethereum/level/database_test.go b/pkg/eth/datastore/ethereum/level/database_test.go deleted file mode 100644 index 0827828f..00000000 --- a/pkg/eth/datastore/ethereum/level/database_test.go +++ /dev/null @@ -1,87 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package level_test - -import ( - "github.com/ethereum/go-ethereum/common" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/ethereum/level" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" -) - -var _ = Describe("Level database", func() { - Describe("Getting a block", func() { - It("converts block number to uint64 and hash to common.Hash to fetch block from reader", func() { - mockReader := fakes.NewMockLevelDatabaseReader() - ldb := level.NewLevelDatabase(mockReader) - blockHash := []byte{5, 4, 3, 2, 1} - blockNumber := int64(12345) - - ldb.GetBlock(blockHash, blockNumber) - - expectedBlockHash := common.BytesToHash(blockHash) - expectedBlockNumber := uint64(blockNumber) - mockReader.AssertGetBlockCalledWith(expectedBlockHash, expectedBlockNumber) - }) - }) - - Describe("Getting a block hash", func() { - It("converts block number to uint64 to fetch hash from reader", func() { - mockReader := fakes.NewMockLevelDatabaseReader() - ldb := level.NewLevelDatabase(mockReader) - blockNumber := int64(12345) - - ldb.GetBlockHash(blockNumber) - - expectedBlockNumber := uint64(blockNumber) - mockReader.AssertGetCanonicalHashCalledWith(expectedBlockNumber) - }) - }) - - Describe("Getting a block's receipts", func() { - It("converts block number to uint64 and hash to common.Hash to fetch receipts from reader", func() { - mockReader := fakes.NewMockLevelDatabaseReader() - ldb := level.NewLevelDatabase(mockReader) - blockHash := []byte{5, 4, 3, 2, 1} - blockNumber := int64(12345) - - ldb.GetBlockReceipts(blockHash, blockNumber) - - 
expectedBlockHash := common.BytesToHash(blockHash) - expectedBlockNumber := uint64(blockNumber) - mockReader.AssertGetBlockReceiptsCalledWith(expectedBlockHash, expectedBlockNumber) - }) - }) - - Describe("Getting the latest block number", func() { - It("invokes the database reader to get the latest block number by hash and converts result to int64", func() { - mockReader := fakes.NewMockLevelDatabaseReader() - fakeHash := common.BytesToHash([]byte{1, 2, 3, 4, 5}) - mockReader.SetHeadBlockHashReturnHash(fakeHash) - fakeBlockNumber := uint64(123456789) - mockReader.SetReturnBlockNumber(fakeBlockNumber) - ldb := level.NewLevelDatabase(mockReader) - - result := ldb.GetHeadBlockNumber() - - mockReader.AssertGetHeadBlockHashCalled() - mockReader.AssertGetBlockNumberCalledWith(fakeHash) - Expect(result).To(Equal(int64(fakeBlockNumber))) - }) - }) -}) diff --git a/pkg/eth/datastore/ethereum/level/level_suite_test.go b/pkg/eth/datastore/ethereum/level/level_suite_test.go deleted file mode 100644 index 7aac15b2..00000000 --- a/pkg/eth/datastore/ethereum/level/level_suite_test.go +++ /dev/null @@ -1,29 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package level_test - -import ( - "testing" - - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" -) - -func TestLevel(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Level Suite") -} diff --git a/pkg/eth/datastore/postgres/repositories/checked_headers_repository.go b/pkg/eth/datastore/postgres/repositories/checked_headers_repository.go deleted file mode 100644 index b18fdff6..00000000 --- a/pkg/eth/datastore/postgres/repositories/checked_headers_repository.go +++ /dev/null @@ -1,72 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package repositories - -import ( - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -const ( - insertCheckedHeaderQuery = `UPDATE public.headers SET check_count = (SELECT check_count WHERE id = $1) + 1 WHERE id = $1` -) - -type CheckedHeadersRepository struct { - db *postgres.DB -} - -func NewCheckedHeadersRepository(db *postgres.DB) CheckedHeadersRepository { - return CheckedHeadersRepository{db: db} -} - -// Increment check_count for header -func (repo CheckedHeadersRepository) MarkHeaderChecked(headerID int64) error { - _, err := repo.db.Exec(insertCheckedHeaderQuery, headerID) - return err -} - -// Zero out check count for headers with block number >= startingBlockNumber -func (repo CheckedHeadersRepository) MarkHeadersUnchecked(startingBlockNumber int64) error { - _, err := repo.db.Exec(`UPDATE public.headers SET check_count = 0 WHERE block_number >= $1`, startingBlockNumber) - return err -} - -// Return header if check_count < passed checkCount -func (repo CheckedHeadersRepository) UncheckedHeaders(startingBlockNumber, endingBlockNumber, checkCount int64) ([]core.Header, error) { - var result []core.Header - var query string - var err error - - if endingBlockNumber == -1 { - query = `SELECT id, block_number, hash - FROM headers - WHERE check_count < $2 - AND block_number >= $1 - AND eth_node_fingerprint = $3` - err = repo.db.Select(&result, query, startingBlockNumber, checkCount, repo.db.Node.ID) - } else { - query = `SELECT id, block_number, hash - FROM headers - WHERE check_count < $3 - AND block_number >= $1 - AND block_number <= $2 - AND eth_node_fingerprint = $4` - err = repo.db.Select(&result, query, startingBlockNumber, endingBlockNumber, checkCount, repo.db.Node.ID) - } - - return result, err -} diff --git a/pkg/eth/datastore/postgres/repositories/checked_headers_repository_test.go b/pkg/eth/datastore/postgres/repositories/checked_headers_repository_test.go deleted file mode 100644 index 
ee6f60ac..00000000 --- a/pkg/eth/datastore/postgres/repositories/checked_headers_repository_test.go +++ /dev/null @@ -1,272 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package repositories_test - -import ( - "math/rand" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/test_config" -) - -var _ = Describe("Checked Headers repository", func() { - var ( - db *postgres.DB - repo datastore.CheckedHeadersRepository - ) - - BeforeEach(func() { - db = test_config.NewTestDB(test_config.NewTestNode()) - test_config.CleanTestDB(db) - repo = repositories.NewCheckedHeadersRepository(db) - }) - - AfterEach(func() { - closeErr := db.Close() - Expect(closeErr).NotTo(HaveOccurred()) - }) - - Describe("MarkHeaderChecked", func() { - It("marks passed header as checked on insert", func() { - headerRepository := repositories.NewHeaderRepository(db) - headerID, headerErr := headerRepository.CreateOrUpdateHeader(fakes.FakeHeader) - Expect(headerErr).NotTo(HaveOccurred()) - - err := repo.MarkHeaderChecked(headerID) - - 
Expect(err).NotTo(HaveOccurred()) - var checkedCount int - fetchErr := db.Get(&checkedCount, `SELECT check_count FROM public.headers WHERE id = $1`, headerID) - Expect(fetchErr).NotTo(HaveOccurred()) - Expect(checkedCount).To(Equal(1)) - }) - - It("increments check count on update", func() { - headerRepository := repositories.NewHeaderRepository(db) - headerID, headerErr := headerRepository.CreateOrUpdateHeader(fakes.FakeHeader) - Expect(headerErr).NotTo(HaveOccurred()) - - insertErr := repo.MarkHeaderChecked(headerID) - Expect(insertErr).NotTo(HaveOccurred()) - - updateErr := repo.MarkHeaderChecked(headerID) - Expect(updateErr).NotTo(HaveOccurred()) - - var checkedCount int - fetchErr := db.Get(&checkedCount, `SELECT check_count FROM public.headers WHERE id = $1`, headerID) - Expect(fetchErr).NotTo(HaveOccurred()) - Expect(checkedCount).To(Equal(2)) - }) - }) - - Describe("MarkHeadersUnchecked", func() { - It("removes rows for headers <= starting block number", func() { - blockNumberOne := rand.Int63() - blockNumberTwo := blockNumberOne + 1 - blockNumberThree := blockNumberOne + 2 - fakeHeaderOne := fakes.GetFakeHeader(blockNumberOne) - fakeHeaderTwo := fakes.GetFakeHeader(blockNumberTwo) - fakeHeaderThree := fakes.GetFakeHeader(blockNumberThree) - headerRepository := repositories.NewHeaderRepository(db) - // insert three headers with incrementing block number - headerIdOne, insertHeaderOneErr := headerRepository.CreateOrUpdateHeader(fakeHeaderOne) - Expect(insertHeaderOneErr).NotTo(HaveOccurred()) - headerIdTwo, insertHeaderTwoErr := headerRepository.CreateOrUpdateHeader(fakeHeaderTwo) - Expect(insertHeaderTwoErr).NotTo(HaveOccurred()) - headerIdThree, insertHeaderThreeErr := headerRepository.CreateOrUpdateHeader(fakeHeaderThree) - Expect(insertHeaderThreeErr).NotTo(HaveOccurred()) - // mark all headers checked - markHeaderOneCheckedErr := repo.MarkHeaderChecked(headerIdOne) - Expect(markHeaderOneCheckedErr).NotTo(HaveOccurred()) - markHeaderTwoCheckedErr := 
repo.MarkHeaderChecked(headerIdTwo) - Expect(markHeaderTwoCheckedErr).NotTo(HaveOccurred()) - markHeaderThreeCheckedErr := repo.MarkHeaderChecked(headerIdThree) - Expect(markHeaderThreeCheckedErr).NotTo(HaveOccurred()) - - // mark headers unchecked since blockNumberTwo - err := repo.MarkHeadersUnchecked(blockNumberTwo) - - Expect(err).NotTo(HaveOccurred()) - var headerOneCheckCount, headerTwoCheckCount, headerThreeCheckCount int - getHeaderOneErr := db.Get(&headerOneCheckCount, `SELECT check_count FROM public.headers WHERE id = $1`, headerIdOne) - Expect(getHeaderOneErr).NotTo(HaveOccurred()) - Expect(headerOneCheckCount).To(Equal(1)) - getHeaderTwoErr := db.Get(&headerTwoCheckCount, `SELECT check_count FROM public.headers WHERE id = $1`, headerIdTwo) - Expect(getHeaderTwoErr).NotTo(HaveOccurred()) - Expect(headerTwoCheckCount).To(BeZero()) - getHeaderThreeErr := db.Get(&headerThreeCheckCount, `SELECT check_count FROM public.headers WHERE id = $1`, headerIdThree) - Expect(getHeaderThreeErr).NotTo(HaveOccurred()) - Expect(headerThreeCheckCount).To(BeZero()) - }) - }) - - Describe("UncheckedHeaders", func() { - var ( - headerRepository datastore.HeaderRepository - startingBlockNumber int64 - endingBlockNumber int64 - middleBlockNumber int64 - outOfRangeBlockNumber int64 - blockNumbers []int64 - headerIDs []int64 - err error - uncheckedCheckCount = int64(1) - recheckCheckCount = int64(2) - ) - - BeforeEach(func() { - headerRepository = repositories.NewHeaderRepository(db) - - startingBlockNumber = rand.Int63() - middleBlockNumber = startingBlockNumber + 1 - endingBlockNumber = startingBlockNumber + 2 - outOfRangeBlockNumber = endingBlockNumber + 1 - - blockNumbers = []int64{startingBlockNumber, middleBlockNumber, endingBlockNumber, outOfRangeBlockNumber} - - headerIDs = []int64{} - for _, n := range blockNumbers { - headerID, err := headerRepository.CreateOrUpdateHeader(fakes.GetFakeHeader(n)) - headerIDs = append(headerIDs, headerID) - 
Expect(err).NotTo(HaveOccurred()) - } - }) - - Describe("when ending block is specified", func() { - It("excludes headers that are out of range", func() { - headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlockNumber, uncheckedCheckCount) - Expect(err).NotTo(HaveOccurred()) - - headerBlockNumbers := getBlockNumbers(headers) - Expect(headerBlockNumbers).To(ConsistOf(startingBlockNumber, middleBlockNumber, endingBlockNumber)) - Expect(headerBlockNumbers).NotTo(ContainElement(outOfRangeBlockNumber)) - }) - - It("excludes headers that have been checked more than the check count", func() { - _, err = db.Exec(`UPDATE public.headers SET check_count = 1 WHERE id = $1`, headerIDs[1]) - Expect(err).NotTo(HaveOccurred()) - - headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlockNumber, uncheckedCheckCount) - Expect(err).NotTo(HaveOccurred()) - - headerBlockNumbers := getBlockNumbers(headers) - Expect(headerBlockNumbers).To(ConsistOf(startingBlockNumber, endingBlockNumber)) - Expect(headerBlockNumbers).NotTo(ContainElement(middleBlockNumber)) - }) - - It("does not exclude headers that have been checked less than the check count", func() { - _, err = db.Exec(`UPDATE public.headers SET check_count = 1 WHERE id = $1`, headerIDs[1]) - Expect(err).NotTo(HaveOccurred()) - - headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlockNumber, recheckCheckCount) - Expect(err).NotTo(HaveOccurred()) - - headerBlockNumbers := getBlockNumbers(headers) - Expect(headerBlockNumbers).To(ConsistOf(startingBlockNumber, middleBlockNumber, endingBlockNumber)) - }) - - It("only returns headers associated with the current node", func() { - dbTwo := test_config.NewTestDB(core.Node{ID: "second"}) - headerRepositoryTwo := repositories.NewHeaderRepository(dbTwo) - repoTwo := repositories.NewCheckedHeadersRepository(dbTwo) - for _, n := range blockNumbers { - _, err = headerRepositoryTwo.CreateOrUpdateHeader(fakes.GetFakeHeader(n + 10)) - 
Expect(err).NotTo(HaveOccurred()) - } - - nodeOneMissingHeaders, err := repo.UncheckedHeaders(startingBlockNumber, endingBlockNumber, uncheckedCheckCount) - Expect(err).NotTo(HaveOccurred()) - nodeOneHeaderBlockNumbers := getBlockNumbers(nodeOneMissingHeaders) - Expect(nodeOneHeaderBlockNumbers).To(ConsistOf(startingBlockNumber, middleBlockNumber, endingBlockNumber)) - - nodeTwoMissingHeaders, err := repoTwo.UncheckedHeaders(startingBlockNumber, endingBlockNumber+10, uncheckedCheckCount) - Expect(err).NotTo(HaveOccurred()) - nodeTwoHeaderBlockNumbers := getBlockNumbers(nodeTwoMissingHeaders) - Expect(nodeTwoHeaderBlockNumbers).To(ConsistOf(startingBlockNumber+10, middleBlockNumber+10, endingBlockNumber+10)) - }) - }) - - Describe("when ending block is -1", func() { - var endingBlock = int64(-1) - - It("includes all non-checked headers when ending block is -1 ", func() { - headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) - Expect(err).NotTo(HaveOccurred()) - - headerBlockNumbers := getBlockNumbers(headers) - Expect(headerBlockNumbers).To(ConsistOf(startingBlockNumber, middleBlockNumber, endingBlockNumber, outOfRangeBlockNumber)) - }) - - It("excludes headers that have been checked more than the check count", func() { - _, err = db.Exec(`UPDATE public.headers SET check_count = 1 WHERE id = $1`, headerIDs[1]) - Expect(err).NotTo(HaveOccurred()) - - headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) - Expect(err).NotTo(HaveOccurred()) - - headerBlockNumbers := getBlockNumbers(headers) - Expect(headerBlockNumbers).To(ConsistOf(startingBlockNumber, endingBlockNumber, outOfRangeBlockNumber)) - Expect(headerBlockNumbers).NotTo(ContainElement(middleBlockNumber)) - }) - - It("does not exclude headers that have been checked less than the check count", func() { - _, err = db.Exec(`UPDATE public.headers SET check_count = 1 WHERE id = $1`, headerIDs[1]) - Expect(err).NotTo(HaveOccurred()) - - 
headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlock, recheckCheckCount) - Expect(err).NotTo(HaveOccurred()) - - headerBlockNumbers := getBlockNumbers(headers) - Expect(headerBlockNumbers).To(ConsistOf(startingBlockNumber, middleBlockNumber, endingBlockNumber, outOfRangeBlockNumber)) - }) - - It("only returns headers associated with the current node", func() { - dbTwo := test_config.NewTestDB(core.Node{ID: "second"}) - headerRepositoryTwo := repositories.NewHeaderRepository(dbTwo) - repoTwo := repositories.NewCheckedHeadersRepository(dbTwo) - for _, n := range blockNumbers { - _, err = headerRepositoryTwo.CreateOrUpdateHeader(fakes.GetFakeHeader(n + 10)) - Expect(err).NotTo(HaveOccurred()) - } - - nodeOneMissingHeaders, err := repo.UncheckedHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) - Expect(err).NotTo(HaveOccurred()) - nodeOneBlockNumbers := getBlockNumbers(nodeOneMissingHeaders) - Expect(nodeOneBlockNumbers).To(ConsistOf(startingBlockNumber, middleBlockNumber, endingBlockNumber, outOfRangeBlockNumber)) - - nodeTwoMissingHeaders, err := repoTwo.UncheckedHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) - Expect(err).NotTo(HaveOccurred()) - nodeTwoBlockNumbers := getBlockNumbers(nodeTwoMissingHeaders) - Expect(nodeTwoBlockNumbers).To(ConsistOf(startingBlockNumber+10, middleBlockNumber+10, endingBlockNumber+10, outOfRangeBlockNumber+10)) - }) - }) - }) -}) - -func getBlockNumbers(headers []core.Header) []int64 { - var headerBlockNumbers []int64 - for _, header := range headers { - headerBlockNumbers = append(headerBlockNumbers, header.BlockNumber) - } - return headerBlockNumbers -} diff --git a/pkg/eth/datastore/postgres/repositories/header_repository.go b/pkg/eth/datastore/postgres/repositories/header_repository.go deleted file mode 100644 index d740086c..00000000 --- a/pkg/eth/datastore/postgres/repositories/header_repository.go +++ /dev/null @@ -1,158 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// 
This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package repositories - -import ( - "database/sql" - "errors" - - "github.com/jmoiron/sqlx" - log "github.com/sirupsen/logrus" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -var ErrValidHeaderExists = errors.New("valid header already exists") - -type HeaderRepository struct { - database *postgres.DB -} - -func NewHeaderRepository(database *postgres.DB) HeaderRepository { - return HeaderRepository{database: database} -} - -func (repository HeaderRepository) CreateOrUpdateHeader(header core.Header) (int64, error) { - hash, err := repository.getHeaderHash(header) - if err != nil { - if headerDoesNotExist(err) { - return repository.InternalInsertHeader(header) - } - log.Error("CreateOrUpdateHeader: error getting header hash: ", err) - return 0, err - } - if headerMustBeReplaced(hash, header) { - return repository.replaceHeader(header) - } - return 0, ErrValidHeaderExists -} - -func (repository HeaderRepository) CreateTransactions(headerID int64, transactions []core.TransactionModel) error { - for _, transaction := range transactions { - _, err := repository.database.Exec(`INSERT INTO public.header_sync_transactions - (header_id, hash, gas_limit, gas_price, input_data, nonce, raw, tx_from, tx_index, tx_to, "value") - VALUES ($1, $2, $3::NUMERIC, $4::NUMERIC, $5, $6::NUMERIC, $7, 
$8, $9::NUMERIC, $10, $11::NUMERIC) - ON CONFLICT DO NOTHING`, headerID, transaction.Hash, transaction.GasLimit, transaction.GasPrice, - transaction.Data, transaction.Nonce, transaction.Raw, transaction.From, transaction.TxIndex, transaction.To, - transaction.Value) - if err != nil { - return err - } - } - return nil -} - -func (repository HeaderRepository) CreateTransactionInTx(tx *sqlx.Tx, headerID int64, transaction core.TransactionModel) (int64, error) { - var txID int64 - err := tx.QueryRowx(`INSERT INTO public.header_sync_transactions - (header_id, hash, gas_limit, gas_price, input_data, nonce, raw, tx_from, tx_index, tx_to, "value") - VALUES ($1, $2, $3::NUMERIC, $4::NUMERIC, $5, $6::NUMERIC, $7, $8, $9::NUMERIC, $10, $11::NUMERIC) - ON CONFLICT (header_id, hash) DO UPDATE - SET (gas_limit, gas_price, input_data, nonce, raw, tx_from, tx_index, tx_to, "value") = ($3::NUMERIC, $4::NUMERIC, $5, $6::NUMERIC, $7, $8, $9::NUMERIC, $10, $11::NUMERIC) - RETURNING id`, - headerID, transaction.Hash, transaction.GasLimit, transaction.GasPrice, - transaction.Data, transaction.Nonce, transaction.Raw, transaction.From, - transaction.TxIndex, transaction.To, transaction.Value).Scan(&txID) - if err != nil { - log.Error("header_repository: error inserting transaction: ", err) - return txID, err - } - return txID, err -} - -func (repository HeaderRepository) GetHeader(blockNumber int64) (core.Header, error) { - var header core.Header - err := repository.database.Get(&header, `SELECT id, block_number, hash, raw, block_timestamp FROM headers WHERE block_number = $1 AND eth_node_fingerprint = $2`, - blockNumber, repository.database.Node.ID) - if err != nil { - log.Error("GetHeader: error getting headers: ", err) - } - return header, err -} - -func (repository HeaderRepository) MissingBlockNumbers(startingBlockNumber, endingBlockNumber int64, nodeID string) ([]int64, error) { - numbers := make([]int64, 0) - err := repository.database.Select(&numbers, - `SELECT series.block_number 
- FROM (SELECT generate_series($1::INT, $2::INT) AS block_number) AS series - LEFT OUTER JOIN (SELECT block_number FROM headers - WHERE eth_node_fingerprint = $3) AS synced - USING (block_number) - WHERE synced.block_number IS NULL`, - startingBlockNumber, endingBlockNumber, nodeID) - if err != nil { - log.Errorf("MissingBlockNumbers failed to get blocks between %v - %v for node %v", - startingBlockNumber, endingBlockNumber, nodeID) - return []int64{}, err - } - return numbers, nil -} - -func headerMustBeReplaced(hash string, header core.Header) bool { - return hash != header.Hash -} - -func headerDoesNotExist(err error) bool { - return err == sql.ErrNoRows -} - -func (repository HeaderRepository) getHeaderHash(header core.Header) (string, error) { - var hash string - err := repository.database.Get(&hash, `SELECT hash FROM headers WHERE block_number = $1 AND eth_node_fingerprint = $2`, - header.BlockNumber, repository.database.Node.ID) - return hash, err -} - -// Function is public so we can test insert being called for the same header -// Can happen when concurrent processes are inserting headers -// Otherwise should not occur since only called in CreateOrUpdateHeader -func (repository HeaderRepository) InternalInsertHeader(header core.Header) (int64, error) { - var headerID int64 - row := repository.database.QueryRowx( - `INSERT INTO public.headers (block_number, hash, block_timestamp, raw, node_id, eth_node_fingerprint) - VALUES ($1, $2, $3::NUMERIC, $4, $5, $6) ON CONFLICT DO NOTHING RETURNING id`, - header.BlockNumber, header.Hash, header.Timestamp, header.Raw, repository.database.NodeID, repository.database.Node.ID) - err := row.Scan(&headerID) - if err != nil { - if err == sql.ErrNoRows { - return 0, ErrValidHeaderExists - } - log.Error("InternalInsertHeader: error inserting header: ", err) - } - return headerID, err -} - -func (repository HeaderRepository) replaceHeader(header core.Header) (int64, error) { - _, err := repository.database.Exec(`DELETE FROM 
headers WHERE block_number = $1 AND eth_node_fingerprint = $2`, - header.BlockNumber, repository.database.Node.ID) - if err != nil { - log.Error("replaceHeader: error deleting headers: ", err) - return 0, err - } - return repository.InternalInsertHeader(header) -} diff --git a/pkg/eth/datastore/postgres/repositories/header_repository_test.go b/pkg/eth/datastore/postgres/repositories/header_repository_test.go deleted file mode 100644 index fc47e20d..00000000 --- a/pkg/eth/datastore/postgres/repositories/header_repository_test.go +++ /dev/null @@ -1,511 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package repositories_test - -import ( - "database/sql" - "encoding/json" - "math/big" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/test_config" -) - -var _ = Describe("Block header repository", func() { - var ( - rawHeader []byte - err error - timestamp string - db *postgres.DB - repo repositories.HeaderRepository - header core.Header - ) - - BeforeEach(func() { - rawHeader, err = json.Marshal(types.Header{}) - Expect(err).NotTo(HaveOccurred()) - timestamp = big.NewInt(123456789).String() - - db = test_config.NewTestDB(test_config.NewTestNode()) - test_config.CleanTestDB(db) - repo = repositories.NewHeaderRepository(db) - header = core.Header{ - BlockNumber: 100, - Hash: common.BytesToHash([]byte{1, 2, 3, 4, 5}).Hex(), - Raw: rawHeader, - Timestamp: timestamp, - } - }) - - Describe("creating or updating a header", func() { - It("adds a header", func() { - _, err = repo.CreateOrUpdateHeader(header) - Expect(err).NotTo(HaveOccurred()) - var dbHeader core.Header - err = db.Get(&dbHeader, `SELECT block_number, hash, raw, block_timestamp FROM public.headers WHERE block_number = $1`, header.BlockNumber) - Expect(err).NotTo(HaveOccurred()) - Expect(dbHeader.BlockNumber).To(Equal(header.BlockNumber)) - Expect(dbHeader.Hash).To(Equal(header.Hash)) - Expect(dbHeader.Raw).To(MatchJSON(header.Raw)) - Expect(dbHeader.Timestamp).To(Equal(header.Timestamp)) - }) - - It("adds node data to header", func() { - _, err = repo.CreateOrUpdateHeader(header) - Expect(err).NotTo(HaveOccurred()) - var ethNodeId int64 - err = db.Get(ðNodeId, `SELECT node_id FROM public.headers WHERE block_number = $1`, header.BlockNumber) - Expect(err).NotTo(HaveOccurred()) - Expect(ethNodeId).To(Equal(db.NodeID)) - var ethNodeFingerprint string - err = db.Get(ðNodeFingerprint, `SELECT eth_node_fingerprint FROM public.headers WHERE block_number = $1`, header.BlockNumber) - 
Expect(err).NotTo(HaveOccurred()) - Expect(ethNodeFingerprint).To(Equal(db.Node.ID)) - }) - - It("returns valid header exists error if attempting duplicate headers", func() { - _, err = repo.CreateOrUpdateHeader(header) - Expect(err).NotTo(HaveOccurred()) - - _, err = repo.CreateOrUpdateHeader(header) - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(repositories.ErrValidHeaderExists)) - - var dbHeaders []core.Header - err = db.Select(&dbHeaders, `SELECT block_number, hash, raw FROM public.headers WHERE block_number = $1`, header.BlockNumber) - Expect(err).NotTo(HaveOccurred()) - Expect(len(dbHeaders)).To(Equal(1)) - }) - - It("does not duplicate headers in concurrent insert", func() { - _, err = repo.InternalInsertHeader(header) - Expect(err).NotTo(HaveOccurred()) - - _, err = repo.InternalInsertHeader(header) - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(repositories.ErrValidHeaderExists)) - - var dbHeaders []core.Header - err = db.Select(&dbHeaders, `SELECT block_number, hash, raw FROM public.headers WHERE block_number = $1`, header.BlockNumber) - Expect(err).NotTo(HaveOccurred()) - Expect(len(dbHeaders)).To(Equal(1)) - }) - - It("replaces header if hash is different", func() { - _, err = repo.CreateOrUpdateHeader(header) - Expect(err).NotTo(HaveOccurred()) - - headerTwo := core.Header{ - BlockNumber: header.BlockNumber, - Hash: common.BytesToHash([]byte{5, 4, 3, 2, 1}).Hex(), - Raw: rawHeader, - Timestamp: timestamp, - } - - _, err = repo.CreateOrUpdateHeader(headerTwo) - - Expect(err).NotTo(HaveOccurred()) - var dbHeader core.Header - err = db.Get(&dbHeader, `SELECT block_number, hash, raw FROM headers WHERE block_number = $1`, header.BlockNumber) - Expect(err).NotTo(HaveOccurred()) - Expect(dbHeader.Hash).To(Equal(headerTwo.Hash)) - Expect(dbHeader.Raw).To(MatchJSON(headerTwo.Raw)) - }) - - It("does not replace header if node fingerprint is different", func() { - _, err = repo.CreateOrUpdateHeader(header) - 
Expect(err).NotTo(HaveOccurred()) - nodeTwo := core.Node{ID: "FingerprintTwo"} - dbTwo, err := postgres.NewDB(test_config.DBConfig, nodeTwo) - Expect(err).NotTo(HaveOccurred()) - - repoTwo := repositories.NewHeaderRepository(dbTwo) - headerTwo := core.Header{ - BlockNumber: header.BlockNumber, - Hash: common.BytesToHash([]byte{5, 4, 3, 2, 1}).Hex(), - Raw: rawHeader, - Timestamp: timestamp, - } - - _, err = repoTwo.CreateOrUpdateHeader(headerTwo) - - Expect(err).NotTo(HaveOccurred()) - var dbHeaders []core.Header - err = dbTwo.Select(&dbHeaders, `SELECT block_number, hash, raw FROM headers WHERE block_number = $1`, header.BlockNumber) - Expect(err).NotTo(HaveOccurred()) - Expect(len(dbHeaders)).To(Equal(2)) - }) - - It("only replaces header with matching node fingerprint", func() { - _, err = repo.CreateOrUpdateHeader(header) - Expect(err).NotTo(HaveOccurred()) - - nodeTwo := core.Node{ID: "FingerprintTwo"} - dbTwo, err := postgres.NewDB(test_config.DBConfig, nodeTwo) - Expect(err).NotTo(HaveOccurred()) - - repoTwo := repositories.NewHeaderRepository(dbTwo) - headerTwo := core.Header{ - BlockNumber: header.BlockNumber, - Hash: common.BytesToHash([]byte{5, 4, 3, 2, 1}).Hex(), - Raw: rawHeader, - Timestamp: timestamp, - } - _, err = repoTwo.CreateOrUpdateHeader(headerTwo) - headerThree := core.Header{ - BlockNumber: header.BlockNumber, - Hash: common.BytesToHash([]byte{1, 1, 1, 1, 1}).Hex(), - Raw: rawHeader, - Timestamp: timestamp, - } - - _, err = repoTwo.CreateOrUpdateHeader(headerThree) - - Expect(err).NotTo(HaveOccurred()) - var dbHeaders []core.Header - err = dbTwo.Select(&dbHeaders, `SELECT block_number, hash, raw FROM headers WHERE block_number = $1`, header.BlockNumber) - Expect(err).NotTo(HaveOccurred()) - Expect(len(dbHeaders)).To(Equal(2)) - Expect(dbHeaders[0].Hash).To(Or(Equal(header.Hash), Equal(headerThree.Hash))) - Expect(dbHeaders[1].Hash).To(Or(Equal(header.Hash), Equal(headerThree.Hash))) - Expect(dbHeaders[0].Raw).To(Or(MatchJSON(header.Raw), 
MatchJSON(headerThree.Raw))) - Expect(dbHeaders[1].Raw).To(Or(MatchJSON(header.Raw), MatchJSON(headerThree.Raw))) - }) - }) - - Describe("creating a receipt", func() { - It("adds a receipt in a tx", func() { - headerID, err := repo.CreateOrUpdateHeader(header) - Expect(err).NotTo(HaveOccurred()) - fromAddress := common.HexToAddress("0x1234") - toAddress := common.HexToAddress("0x5678") - txHash := common.HexToHash("0x9876") - txIndex := big.NewInt(123) - transaction := core.TransactionModel{ - Data: []byte{}, - From: fromAddress.Hex(), - GasLimit: 0, - GasPrice: 0, - Hash: txHash.Hex(), - Nonce: 0, - Raw: []byte{}, - To: toAddress.Hex(), - TxIndex: txIndex.Int64(), - Value: "0", - } - tx, err := db.Beginx() - Expect(err).ToNot(HaveOccurred()) - txId, txErr := repo.CreateTransactionInTx(tx, headerID, transaction) - Expect(txErr).ToNot(HaveOccurred()) - - contractAddr := common.HexToAddress("0x1234") - stateRoot := common.HexToHash("0x5678") - receipt := core.Receipt{ - ContractAddress: contractAddr.Hex(), - TxHash: txHash.Hex(), - GasUsed: 10, - CumulativeGasUsed: 100, - StateRoot: stateRoot.Hex(), - Rlp: []byte{1, 2, 3}, - } - - receiptRepo := repositories.HeaderSyncReceiptRepository{} - _, receiptErr := receiptRepo.CreateHeaderSyncReceiptInTx(headerID, txId, receipt, tx) - Expect(receiptErr).ToNot(HaveOccurred()) - commitErr := tx.Commit() - Expect(commitErr).ToNot(HaveOccurred()) - - type idModel struct { - TransactionId int64 `db:"transaction_id"` - ContractAddressId int64 `db:"contract_address_id"` - CumulativeGasUsed uint64 `db:"cumulative_gas_used"` - GasUsed uint64 `db:"gas_used"` - StateRoot string `db:"state_root"` - Status int - TxHash string `db:"tx_hash"` - Rlp []byte `db:"rlp"` - } - - var addressId int64 - getAddressErr := db.Get(&addressId, `SELECT id FROM addresses WHERE address = $1`, contractAddr.Hex()) - Expect(getAddressErr).NotTo(HaveOccurred()) - - var dbReceipt idModel - getReceiptErr := db.Get(&dbReceipt, - `SELECT transaction_id, 
contract_address_id, cumulative_gas_used, gas_used, state_root, status, tx_hash, rlp - FROM public.header_sync_receipts WHERE header_id = $1`, headerID) - Expect(getReceiptErr).NotTo(HaveOccurred()) - - Expect(dbReceipt.TransactionId).To(Equal(txId)) - Expect(dbReceipt.TxHash).To(Equal(txHash.Hex())) - Expect(dbReceipt.ContractAddressId).To(Equal(addressId)) - Expect(dbReceipt.CumulativeGasUsed).To(Equal(uint64(100))) - Expect(dbReceipt.GasUsed).To(Equal(uint64(10))) - Expect(dbReceipt.StateRoot).To(Equal(stateRoot.Hex())) - Expect(dbReceipt.Status).To(Equal(0)) - Expect(dbReceipt.Rlp).To(Equal([]byte{1, 2, 3})) - }) - }) - - Describe("creating a transaction", func() { - var ( - headerID int64 - transactions []core.TransactionModel - ) - - BeforeEach(func() { - var err error - headerID, err = repo.CreateOrUpdateHeader(header) - Expect(err).NotTo(HaveOccurred()) - fromAddress := common.HexToAddress("0x1234") - toAddress := common.HexToAddress("0x5678") - txHash := common.HexToHash("0x9876") - txHashTwo := common.HexToHash("0x5432") - txIndex := big.NewInt(123) - transactions = []core.TransactionModel{{ - Data: []byte{}, - From: fromAddress.Hex(), - GasLimit: 0, - GasPrice: 0, - Hash: txHash.Hex(), - Nonce: 0, - Raw: []byte{}, - To: toAddress.Hex(), - TxIndex: txIndex.Int64(), - Value: "0", - Receipt: core.Receipt{}, - }, { - Data: []byte{}, - From: fromAddress.Hex(), - GasLimit: 1, - GasPrice: 1, - Hash: txHashTwo.Hex(), - Nonce: 1, - Raw: []byte{}, - To: toAddress.Hex(), - TxIndex: 1, - Value: "1", - }} - - insertErr := repo.CreateTransactions(headerID, transactions) - Expect(insertErr).NotTo(HaveOccurred()) - }) - - It("adds transactions", func() { - var dbTransactions []core.TransactionModel - err = db.Select(&dbTransactions, - `SELECT hash, gas_limit, gas_price, input_data, nonce, raw, tx_from, tx_index, tx_to, "value" - FROM public.header_sync_transactions WHERE header_id = $1`, headerID) - Expect(err).NotTo(HaveOccurred()) - 
Expect(dbTransactions).To(ConsistOf(transactions)) - }) - - It("silently ignores duplicate inserts", func() { - insertTwoErr := repo.CreateTransactions(headerID, transactions) - Expect(insertTwoErr).NotTo(HaveOccurred()) - - var dbTransactions []core.TransactionModel - err = db.Select(&dbTransactions, - `SELECT hash, gas_limit, gas_price, input_data, nonce, raw, tx_from, tx_index, tx_to, "value" - FROM public.header_sync_transactions WHERE header_id = $1`, headerID) - Expect(err).NotTo(HaveOccurred()) - Expect(len(dbTransactions)).To(Equal(2)) - }) - }) - - Describe("creating a transaction in a sqlx tx", func() { - It("adds a transaction", func() { - headerID, err := repo.CreateOrUpdateHeader(header) - Expect(err).NotTo(HaveOccurred()) - fromAddress := common.HexToAddress("0x1234") - toAddress := common.HexToAddress("0x5678") - txHash := common.HexToHash("0x9876") - txIndex := big.NewInt(123) - transaction := core.TransactionModel{ - Data: []byte{}, - From: fromAddress.Hex(), - GasLimit: 0, - GasPrice: 0, - Hash: txHash.Hex(), - Nonce: 0, - Raw: []byte{1, 2, 3}, - To: toAddress.Hex(), - TxIndex: txIndex.Int64(), - Value: "0", - } - - tx, err := db.Beginx() - Expect(err).ToNot(HaveOccurred()) - _, insertErr := repo.CreateTransactionInTx(tx, headerID, transaction) - commitErr := tx.Commit() - Expect(commitErr).ToNot(HaveOccurred()) - Expect(insertErr).NotTo(HaveOccurred()) - - var dbTransaction core.TransactionModel - err = db.Get(&dbTransaction, - `SELECT hash, gas_limit, gas_price, input_data, nonce, raw, tx_from, tx_index, tx_to, "value" - FROM public.header_sync_transactions WHERE header_id = $1`, headerID) - Expect(err).NotTo(HaveOccurred()) - Expect(dbTransaction).To(Equal(transaction)) - }) - - It("silently upserts", func() { - headerID, err := repo.CreateOrUpdateHeader(header) - Expect(err).NotTo(HaveOccurred()) - fromAddress := common.HexToAddress("0x1234") - toAddress := common.HexToAddress("0x5678") - txHash := common.HexToHash("0x9876") - txIndex := 
big.NewInt(123) - transaction := core.TransactionModel{ - Data: []byte{}, - From: fromAddress.Hex(), - GasLimit: 0, - GasPrice: 0, - Hash: txHash.Hex(), - Nonce: 0, - Raw: []byte{}, - Receipt: core.Receipt{}, - To: toAddress.Hex(), - TxIndex: txIndex.Int64(), - Value: "0", - } - - tx1, err := db.Beginx() - Expect(err).ToNot(HaveOccurred()) - txId1, insertErr := repo.CreateTransactionInTx(tx1, headerID, transaction) - commit1Err := tx1.Commit() - Expect(commit1Err).ToNot(HaveOccurred()) - Expect(insertErr).NotTo(HaveOccurred()) - - tx2, err := db.Beginx() - Expect(err).ToNot(HaveOccurred()) - txId2, insertErr := repo.CreateTransactionInTx(tx2, headerID, transaction) - commit2Err := tx2.Commit() - Expect(commit2Err).ToNot(HaveOccurred()) - Expect(insertErr).NotTo(HaveOccurred()) - Expect(txId1).To(Equal(txId2)) - - var dbTransactions []core.TransactionModel - err = db.Select(&dbTransactions, - `SELECT hash, gas_limit, gas_price, input_data, nonce, raw, tx_from, tx_index, tx_to, "value" - FROM public.header_sync_transactions WHERE header_id = $1`, headerID) - Expect(err).NotTo(HaveOccurred()) - Expect(len(dbTransactions)).To(Equal(1)) - }) - }) - - Describe("Getting a header", func() { - It("returns header if it exists", func() { - _, err = repo.CreateOrUpdateHeader(header) - Expect(err).NotTo(HaveOccurred()) - - dbHeader, err := repo.GetHeader(header.BlockNumber) - - Expect(err).NotTo(HaveOccurred()) - Expect(dbHeader.ID).NotTo(BeZero()) - Expect(dbHeader.BlockNumber).To(Equal(header.BlockNumber)) - Expect(dbHeader.Hash).To(Equal(header.Hash)) - Expect(dbHeader.Raw).To(MatchJSON(header.Raw)) - Expect(dbHeader.Timestamp).To(Equal(header.Timestamp)) - }) - - It("does not return header for a different node fingerprint", func() { - _, err = repo.CreateOrUpdateHeader(header) - Expect(err).NotTo(HaveOccurred()) - - nodeTwo := core.Node{ID: "FingerprintTwo"} - dbTwo, err := postgres.NewDB(test_config.DBConfig, nodeTwo) - Expect(err).NotTo(HaveOccurred()) - repoTwo := 
repositories.NewHeaderRepository(dbTwo) - - _, err = repoTwo.GetHeader(header.BlockNumber) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(sql.ErrNoRows)) - }) - }) - - Describe("Getting missing headers", func() { - It("returns block numbers for headers not in the database", func() { - _, err = repo.CreateOrUpdateHeader(core.Header{ - BlockNumber: 1, - Raw: rawHeader, - Timestamp: timestamp, - }) - Expect(err).NotTo(HaveOccurred()) - - _, err = repo.CreateOrUpdateHeader(core.Header{ - BlockNumber: 3, - Raw: rawHeader, - Timestamp: timestamp, - }) - Expect(err).NotTo(HaveOccurred()) - - _, err = repo.CreateOrUpdateHeader(core.Header{ - BlockNumber: 5, - Raw: rawHeader, - Timestamp: timestamp, - }) - Expect(err).NotTo(HaveOccurred()) - - missingBlockNumbers, err := repo.MissingBlockNumbers(1, 5, db.Node.ID) - Expect(err).NotTo(HaveOccurred()) - - Expect(missingBlockNumbers).To(ConsistOf([]int64{2, 4})) - }) - - It("does not count headers created by a different node fingerprint", func() { - _, err = repo.CreateOrUpdateHeader(core.Header{ - BlockNumber: 1, - Raw: rawHeader, - Timestamp: timestamp, - }) - Expect(err).NotTo(HaveOccurred()) - - _, err = repo.CreateOrUpdateHeader(core.Header{ - BlockNumber: 3, - Raw: rawHeader, - Timestamp: timestamp, - }) - Expect(err).NotTo(HaveOccurred()) - - _, err = repo.CreateOrUpdateHeader(core.Header{ - BlockNumber: 5, - Raw: rawHeader, - Timestamp: timestamp, - }) - Expect(err).NotTo(HaveOccurred()) - - nodeTwo := core.Node{ID: "FingerprintTwo"} - dbTwo, err := postgres.NewDB(test_config.DBConfig, nodeTwo) - Expect(err).NotTo(HaveOccurred()) - repoTwo := repositories.NewHeaderRepository(dbTwo) - - missingBlockNumbers, err := repoTwo.MissingBlockNumbers(1, 5, nodeTwo.ID) - Expect(err).NotTo(HaveOccurred()) - - Expect(missingBlockNumbers).To(ConsistOf([]int64{1, 2, 3, 4, 5})) - }) - }) -}) diff --git a/pkg/eth/datastore/postgres/repositories/header_sync_log_repository.go 
b/pkg/eth/datastore/postgres/repositories/header_sync_log_repository.go deleted file mode 100644 index 7b5e162c..00000000 --- a/pkg/eth/datastore/postgres/repositories/header_sync_log_repository.go +++ /dev/null @@ -1,145 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package repositories - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - "github.com/jmoiron/sqlx" - "github.com/lib/pq" - "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/libraries/shared/repository" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -const insertHeaderSyncLogQuery = `INSERT INTO header_sync_logs - (header_id, address, topics, data, block_number, block_hash, tx_index, tx_hash, log_index, raw) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) ON CONFLICT DO NOTHING` - -type HeaderSyncLogRepository struct { - db *postgres.DB -} - -func NewHeaderSyncLogRepository(db *postgres.DB) HeaderSyncLogRepository { - return HeaderSyncLogRepository{ - db: db, - } -} - -type headerSyncLog struct { - ID int64 - HeaderID int64 `db:"header_id"` - Address int64 - Topics pq.ByteaArray - Data []byte - BlockNumber uint64 `db:"block_number"` - BlockHash string `db:"block_hash"` - TxHash string `db:"tx_hash"` - TxIndex uint `db:"tx_index"` - 
LogIndex uint `db:"log_index"` - Transformed bool - Raw []byte -} - -func (repo HeaderSyncLogRepository) GetUntransformedHeaderSyncLogs() ([]core.HeaderSyncLog, error) { - rows, queryErr := repo.db.Queryx(`SELECT * FROM public.header_sync_logs WHERE transformed = false`) - if queryErr != nil { - return nil, queryErr - } - - var results []core.HeaderSyncLog - for rows.Next() { - var rawLog headerSyncLog - scanErr := rows.StructScan(&rawLog) - if scanErr != nil { - return nil, scanErr - } - var logTopics []common.Hash - for _, topic := range rawLog.Topics { - logTopics = append(logTopics, common.BytesToHash(topic)) - } - address, addrErr := repository.GetAddressByID(repo.db, rawLog.Address) - if addrErr != nil { - return nil, addrErr - } - reconstructedLog := types.Log{ - Address: common.HexToAddress(address), - Topics: logTopics, - Data: rawLog.Data, - BlockNumber: rawLog.BlockNumber, - TxHash: common.HexToHash(rawLog.TxHash), - TxIndex: rawLog.TxIndex, - BlockHash: common.HexToHash(rawLog.BlockHash), - Index: rawLog.LogIndex, - // TODO: revisit if not cascade deleting logs when header removed - // currently, fetched logs are cascade deleted if removed - Removed: false, - } - result := core.HeaderSyncLog{ - ID: rawLog.ID, - HeaderID: rawLog.HeaderID, - Log: reconstructedLog, - Transformed: rawLog.Transformed, - } - // TODO: Consider returning each result async to avoid keeping large result sets in memory - results = append(results, result) - } - - return results, nil -} - -func (repo HeaderSyncLogRepository) CreateHeaderSyncLogs(headerID int64, logs []types.Log) error { - tx, txErr := repo.db.Beginx() - if txErr != nil { - return txErr - } - for _, log := range logs { - err := repo.insertLog(headerID, log, tx) - if err != nil { - rollbackErr := tx.Rollback() - if rollbackErr != nil { - logrus.Errorf("failed to rollback header sync log insert: %s", rollbackErr.Error()) - } - return err - } - } - return tx.Commit() -} - -func (repo HeaderSyncLogRepository) 
insertLog(headerID int64, log types.Log, tx *sqlx.Tx) error { - topics := buildTopics(log) - raw, jsonErr := log.MarshalJSON() - if jsonErr != nil { - return jsonErr - } - addressID, addrErr := repository.GetOrCreateAddressInTransaction(tx, log.Address.Hex()) - if addrErr != nil { - return addrErr - } - _, insertErr := tx.Exec(insertHeaderSyncLogQuery, headerID, addressID, topics, log.Data, log.BlockNumber, - log.BlockHash.Hex(), log.TxIndex, log.TxHash.Hex(), log.Index, raw) - return insertErr -} - -func buildTopics(log types.Log) pq.ByteaArray { - var topics pq.ByteaArray - for _, topic := range log.Topics { - topics = append(topics, topic.Bytes()) - } - return topics -} diff --git a/pkg/eth/datastore/postgres/repositories/header_sync_log_repository_test.go b/pkg/eth/datastore/postgres/repositories/header_sync_log_repository_test.go deleted file mode 100644 index 7c19fb6b..00000000 --- a/pkg/eth/datastore/postgres/repositories/header_sync_log_repository_test.go +++ /dev/null @@ -1,213 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package repositories_test - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - "github.com/lib/pq" - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - repository2 "github.com/vulcanize/vulcanizedb/libraries/shared/repository" - "github.com/vulcanize/vulcanizedb/libraries/shared/test_data" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/test_config" -) - -var _ = Describe("Header sync log repository", func() { - var ( - db *postgres.DB - headerID int64 - repository datastore.HeaderSyncLogRepository - ) - - BeforeEach(func() { - db = test_config.NewTestDB(test_config.NewTestNode()) - test_config.CleanTestDB(db) - headerRepository := repositories.NewHeaderRepository(db) - var headerErr error - headerID, headerErr = headerRepository.CreateOrUpdateHeader(fakes.FakeHeader) - Expect(headerErr).NotTo(HaveOccurred()) - repository = repositories.NewHeaderSyncLogRepository(db) - }) - - AfterEach(func() { - closeErr := db.Close() - Expect(closeErr).NotTo(HaveOccurred()) - }) - - Describe("CreateHeaderSyncLogs", func() { - type headerSyncLog struct { - ID int64 - HeaderID int64 `db:"header_id"` - Address int64 - Topics pq.ByteaArray - Data []byte - BlockNumber uint64 `db:"block_number"` - BlockHash string `db:"block_hash"` - TxHash string `db:"tx_hash"` - TxIndex uint `db:"tx_index"` - LogIndex uint `db:"log_index"` - Transformed bool - Raw []byte - } - - It("writes a log to the db", func() { - log := test_data.GenericTestLog() - - err := repository.CreateHeaderSyncLogs(headerID, []types.Log{log}) - - Expect(err).NotTo(HaveOccurred()) - var dbLog headerSyncLog - lookupErr := db.Get(&dbLog, `SELECT * FROM header_sync_logs`) - Expect(lookupErr).NotTo(HaveOccurred()) - Expect(dbLog.ID).NotTo(BeZero()) - Expect(dbLog.HeaderID).To(Equal(headerID)) - actualAddress, addressErr := repository2.GetAddressByID(db, dbLog.Address) - Expect(addressErr).NotTo(HaveOccurred()) - 
Expect(actualAddress).To(Equal(log.Address.Hex())) - Expect(dbLog.Topics[0]).To(Equal(log.Topics[0].Bytes())) - Expect(dbLog.Topics[1]).To(Equal(log.Topics[1].Bytes())) - Expect(dbLog.Data).To(Equal(log.Data)) - Expect(dbLog.BlockNumber).To(Equal(log.BlockNumber)) - Expect(dbLog.BlockHash).To(Equal(log.BlockHash.Hex())) - Expect(dbLog.TxIndex).To(Equal(log.TxIndex)) - Expect(dbLog.TxHash).To(Equal(log.TxHash.Hex())) - Expect(dbLog.LogIndex).To(Equal(log.Index)) - expectedRaw, jsonErr := log.MarshalJSON() - Expect(jsonErr).NotTo(HaveOccurred()) - Expect(dbLog.Raw).To(MatchJSON(expectedRaw)) - Expect(dbLog.Transformed).To(BeFalse()) - }) - - It("writes several logs to the db", func() { - log1 := test_data.GenericTestLog() - log2 := test_data.GenericTestLog() - logs := []types.Log{log1, log2} - - err := repository.CreateHeaderSyncLogs(headerID, logs) - - Expect(err).NotTo(HaveOccurred()) - var count int - lookupErr := db.Get(&count, `SELECT COUNT(*) FROM header_sync_logs`) - Expect(lookupErr).NotTo(HaveOccurred()) - Expect(count).To(Equal(len(logs))) - }) - - It("persists record that can be unpacked into types.Log", func() { - // important if we want to decouple log persistence from transforming and still make use of - // tools on types.Log like abi.Unpack - log := test_data.GenericTestLog() - - err := repository.CreateHeaderSyncLogs(headerID, []types.Log{log}) - - Expect(err).NotTo(HaveOccurred()) - var dbLog headerSyncLog - lookupErr := db.Get(&dbLog, `SELECT * FROM header_sync_logs`) - Expect(lookupErr).NotTo(HaveOccurred()) - - var logTopics []common.Hash - for _, topic := range dbLog.Topics { - logTopics = append(logTopics, common.BytesToHash(topic)) - } - - actualAddress, addressErr := repository2.GetAddressByID(db, dbLog.Address) - Expect(addressErr).NotTo(HaveOccurred()) - reconstructedLog := types.Log{ - Address: common.HexToAddress(actualAddress), - Topics: logTopics, - Data: dbLog.Data, - BlockNumber: dbLog.BlockNumber, - TxHash: 
common.HexToHash(dbLog.TxHash), - TxIndex: dbLog.TxIndex, - BlockHash: common.HexToHash(dbLog.BlockHash), - Index: dbLog.LogIndex, - Removed: false, - } - Expect(reconstructedLog).To(Equal(log)) - }) - - It("does not duplicate logs", func() { - log := test_data.GenericTestLog() - - err := repository.CreateHeaderSyncLogs(headerID, []types.Log{log, log}) - - Expect(err).NotTo(HaveOccurred()) - var count int - lookupErr := db.Get(&count, `SELECT COUNT(*) FROM header_sync_logs`) - Expect(lookupErr).NotTo(HaveOccurred()) - Expect(count).To(Equal(1)) - }) - }) - - Describe("GetUntransformedHeaderSyncLogs", func() { - Describe("when there are no logs", func() { - It("returns empty collection", func() { - result, err := repository.GetUntransformedHeaderSyncLogs() - - Expect(err).NotTo(HaveOccurred()) - Expect(len(result)).To(BeZero()) - }) - }) - - Describe("when there are logs", func() { - var log1, log2 types.Log - - BeforeEach(func() { - log1 = test_data.GenericTestLog() - log2 = test_data.GenericTestLog() - logs := []types.Log{log1, log2} - logsErr := repository.CreateHeaderSyncLogs(headerID, logs) - Expect(logsErr).NotTo(HaveOccurred()) - }) - - It("returns persisted logs", func() { - result, err := repository.GetUntransformedHeaderSyncLogs() - - Expect(err).NotTo(HaveOccurred()) - Expect(len(result)).To(Equal(2)) - Expect(result[0].Log).To(Or(Equal(log1), Equal(log2))) - Expect(result[1].Log).To(Or(Equal(log1), Equal(log2))) - Expect(result[0].Log).NotTo(Equal(result[1].Log)) - }) - - It("excludes logs that have been transformed", func() { - _, insertErr := db.Exec(`UPDATE public.header_sync_logs SET transformed = true WHERE tx_hash = $1`, log1.TxHash.Hex()) - Expect(insertErr).NotTo(HaveOccurred()) - - result, err := repository.GetUntransformedHeaderSyncLogs() - - Expect(err).NotTo(HaveOccurred()) - Expect(len(result)).To(Equal(1)) - Expect(result[0].Log).To(Equal(log2)) - }) - - It("returns empty collection if all logs transformed", func() { - _, insertErr := 
db.Exec(`UPDATE public.header_sync_logs SET transformed = true WHERE header_id = $1`, headerID) - Expect(insertErr).NotTo(HaveOccurred()) - - result, err := repository.GetUntransformedHeaderSyncLogs() - - Expect(err).NotTo(HaveOccurred()) - Expect(len(result)).To(BeZero()) - }) - }) - }) -}) diff --git a/pkg/eth/datastore/postgres/repositories/header_sync_receipt_repository.go b/pkg/eth/datastore/postgres/repositories/header_sync_receipt_repository.go deleted file mode 100644 index 579c7e12..00000000 --- a/pkg/eth/datastore/postgres/repositories/header_sync_receipt_repository.go +++ /dev/null @@ -1,47 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package repositories - -import ( - "github.com/ethereum/go-ethereum/log" - "github.com/jmoiron/sqlx" - "github.com/vulcanize/vulcanizedb/libraries/shared/repository" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type HeaderSyncReceiptRepository struct{} - -func (HeaderSyncReceiptRepository) CreateHeaderSyncReceiptInTx(headerID, transactionID int64, receipt core.Receipt, tx *sqlx.Tx) (int64, error) { - var receiptID int64 - addressID, getAddressErr := repository.GetOrCreateAddressInTransaction(tx, receipt.ContractAddress) - if getAddressErr != nil { - log.Error("createReceipt: Error getting address id: ", getAddressErr) - return receiptID, getAddressErr - } - err := tx.QueryRowx(`INSERT INTO public.header_sync_receipts - (header_id, transaction_id, contract_address_id, cumulative_gas_used, gas_used, state_root, status, tx_hash, rlp) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) - ON CONFLICT (header_id, transaction_id) DO UPDATE - SET (contract_address_id, cumulative_gas_used, gas_used, state_root, status, tx_hash, rlp) = ($3, $4::NUMERIC, $5::NUMERIC, $6, $7, $8, $9) - RETURNING id`, - headerID, transactionID, addressID, receipt.CumulativeGasUsed, receipt.GasUsed, receipt.StateRoot, receipt.Status, receipt.TxHash, receipt.Rlp).Scan(&receiptID) - if err != nil { - log.Error("header_repository: error inserting receipt: ", err) - return receiptID, err - } - return receiptID, err -} diff --git a/pkg/eth/datastore/postgres/repositories/header_sync_receipt_repository_test.go b/pkg/eth/datastore/postgres/repositories/header_sync_receipt_repository_test.go deleted file mode 100644 index c7494a9a..00000000 --- a/pkg/eth/datastore/postgres/repositories/header_sync_receipt_repository_test.go +++ /dev/null @@ -1,133 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either 
version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package repositories_test - -import ( - "encoding/json" - "math/big" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/test_config" -) - -var _ = Describe("Header Sync Receipt Repo", func() { - var ( - rawHeader []byte - err error - timestamp string - db *postgres.DB - receiptRepo repositories.HeaderSyncReceiptRepository - headerRepo repositories.HeaderRepository - header core.Header - ) - - BeforeEach(func() { - rawHeader, err = json.Marshal(types.Header{}) - Expect(err).NotTo(HaveOccurred()) - timestamp = big.NewInt(123456789).String() - - db = test_config.NewTestDB(test_config.NewTestNode()) - test_config.CleanTestDB(db) - receiptRepo = repositories.HeaderSyncReceiptRepository{} - headerRepo = repositories.NewHeaderRepository(db) - header = core.Header{ - BlockNumber: 100, - Hash: common.BytesToHash([]byte{1, 2, 3, 4, 5}).Hex(), - Raw: rawHeader, - Timestamp: timestamp, - } - }) - Describe("creating a receipt", func() { - It("adds a receipt in a tx", func() { - headerID, err := headerRepo.CreateOrUpdateHeader(header) - Expect(err).NotTo(HaveOccurred()) - fromAddress := common.HexToAddress("0x1234") - toAddress := common.HexToAddress("0x5678") - txHash := common.HexToHash("0x9876") - txIndex := big.NewInt(123) 
- transaction := core.TransactionModel{ - Data: []byte{}, - From: fromAddress.Hex(), - GasLimit: 0, - GasPrice: 0, - Hash: txHash.Hex(), - Nonce: 0, - Raw: []byte{}, - To: toAddress.Hex(), - TxIndex: txIndex.Int64(), - Value: "0", - } - tx, err := db.Beginx() - Expect(err).ToNot(HaveOccurred()) - txId, txErr := headerRepo.CreateTransactionInTx(tx, headerID, transaction) - Expect(txErr).ToNot(HaveOccurred()) - - contractAddr := common.HexToAddress("0x1234") - stateRoot := common.HexToHash("0x5678") - receipt := core.Receipt{ - ContractAddress: contractAddr.Hex(), - TxHash: txHash.Hex(), - GasUsed: 10, - CumulativeGasUsed: 100, - StateRoot: stateRoot.Hex(), - Rlp: []byte{1, 2, 3}, - } - - _, receiptErr := receiptRepo.CreateHeaderSyncReceiptInTx(headerID, txId, receipt, tx) - Expect(receiptErr).ToNot(HaveOccurred()) - commitErr := tx.Commit() - Expect(commitErr).ToNot(HaveOccurred()) - - type idModel struct { - TransactionId int64 `db:"transaction_id"` - ContractAddressId int64 `db:"contract_address_id"` - CumulativeGasUsed uint64 `db:"cumulative_gas_used"` - GasUsed uint64 `db:"gas_used"` - StateRoot string `db:"state_root"` - Status int - TxHash string `db:"tx_hash"` - Rlp []byte `db:"rlp"` - } - - var addressId int64 - getAddressErr := db.Get(&addressId, `SELECT id FROM addresses WHERE address = $1`, contractAddr.Hex()) - Expect(getAddressErr).NotTo(HaveOccurred()) - - var dbReceipt idModel - getReceiptErr := db.Get(&dbReceipt, - `SELECT transaction_id, contract_address_id, cumulative_gas_used, gas_used, state_root, status, tx_hash, rlp - FROM public.header_sync_receipts WHERE header_id = $1`, headerID) - Expect(getReceiptErr).NotTo(HaveOccurred()) - - Expect(dbReceipt.TransactionId).To(Equal(txId)) - Expect(dbReceipt.TxHash).To(Equal(txHash.Hex())) - Expect(dbReceipt.ContractAddressId).To(Equal(addressId)) - Expect(dbReceipt.CumulativeGasUsed).To(Equal(uint64(100))) - Expect(dbReceipt.GasUsed).To(Equal(uint64(10))) - 
Expect(dbReceipt.StateRoot).To(Equal(stateRoot.Hex())) - Expect(dbReceipt.Status).To(Equal(0)) - Expect(dbReceipt.Rlp).To(Equal([]byte{1, 2, 3})) - }) - }) -}) diff --git a/pkg/eth/datastore/postgres/repositories/repositories_suite_test.go b/pkg/eth/datastore/postgres/repositories/repositories_suite_test.go deleted file mode 100644 index 3f9cf68b..00000000 --- a/pkg/eth/datastore/postgres/repositories/repositories_suite_test.go +++ /dev/null @@ -1,36 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package repositories_test - -import ( - "io/ioutil" - "testing" - - "github.com/sirupsen/logrus" - - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" -) - -func TestRepositories(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Repositories Suite") -} - -var _ = BeforeSuite(func() { - logrus.SetOutput(ioutil.Discard) -}) diff --git a/pkg/eth/datastore/postgres/repositories/storage_diff_repository.go b/pkg/eth/datastore/postgres/repositories/storage_diff_repository.go deleted file mode 100644 index 81b1b183..00000000 --- a/pkg/eth/datastore/postgres/repositories/storage_diff_repository.go +++ /dev/null @@ -1,47 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package repositories - -import ( - "database/sql" - - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -var ErrDuplicateDiff = sql.ErrNoRows - -type StorageDiffRepository struct { - db *postgres.DB -} - -func NewStorageDiffRepository(db *postgres.DB) StorageDiffRepository { - return StorageDiffRepository{db: db} -} - -func (repository StorageDiffRepository) CreateStorageDiff(input utils.StorageDiffInput) (int64, error) { - var storageDiffID int64 - row := repository.db.QueryRowx(`INSERT INTO public.storage_diff - (hashed_address, block_height, block_hash, storage_key, storage_value) VALUES ($1, $2, $3, $4, $5) - ON CONFLICT DO NOTHING RETURNING id`, input.HashedAddress.Bytes(), input.BlockHeight, input.BlockHash.Bytes(), - input.StorageKey.Bytes(), input.StorageValue.Bytes()) - err := row.Scan(&storageDiffID) - if err != nil && err == sql.ErrNoRows { - return 0, ErrDuplicateDiff - } - return storageDiffID, err -} diff --git a/pkg/eth/datastore/postgres/repositories/storage_diff_repository_test.go b/pkg/eth/datastore/postgres/repositories/storage_diff_repository_test.go deleted file mode 100644 index bd48e7bf..00000000 --- a/pkg/eth/datastore/postgres/repositories/storage_diff_repository_test.go +++ /dev/null @@ -1,83 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. 
If not, see . - -package repositories_test - -import ( - "database/sql" - "math/rand" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/libraries/shared/test_data" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/test_config" -) - -var _ = Describe("Storage diffs repository", func() { - var ( - db *postgres.DB - repo repositories.StorageDiffRepository - fakeStorageDiff utils.StorageDiffInput - ) - - BeforeEach(func() { - db = test_config.NewTestDB(test_config.NewTestNode()) - test_config.CleanTestDB(db) - repo = repositories.NewStorageDiffRepository(db) - fakeStorageDiff = utils.StorageDiffInput{ - HashedAddress: test_data.FakeHash(), - BlockHash: test_data.FakeHash(), - BlockHeight: rand.Int(), - StorageKey: test_data.FakeHash(), - StorageValue: test_data.FakeHash(), - } - }) - - Describe("CreateStorageDiff", func() { - It("adds a storage diff to the db, returning id", func() { - id, createErr := repo.CreateStorageDiff(fakeStorageDiff) - - Expect(createErr).NotTo(HaveOccurred()) - Expect(id).NotTo(BeZero()) - var persisted utils.PersistedStorageDiff - getErr := db.Get(&persisted, `SELECT * FROM public.storage_diff`) - Expect(getErr).NotTo(HaveOccurred()) - Expect(persisted.ID).To(Equal(id)) - Expect(persisted.HashedAddress).To(Equal(fakeStorageDiff.HashedAddress)) - Expect(persisted.BlockHash).To(Equal(fakeStorageDiff.BlockHash)) - Expect(persisted.BlockHeight).To(Equal(fakeStorageDiff.BlockHeight)) - Expect(persisted.StorageKey).To(Equal(fakeStorageDiff.StorageKey)) - Expect(persisted.StorageValue).To(Equal(fakeStorageDiff.StorageValue)) - }) - - It("does not duplicate storage diffs", func() { - _, createErr := repo.CreateStorageDiff(fakeStorageDiff) - Expect(createErr).NotTo(HaveOccurred()) - - _, createTwoErr := 
repo.CreateStorageDiff(fakeStorageDiff) - Expect(createTwoErr).To(HaveOccurred()) - Expect(createTwoErr).To(MatchError(sql.ErrNoRows)) - - var count int - getErr := db.Get(&count, `SELECT count(*) FROM public.storage_diff`) - Expect(getErr).NotTo(HaveOccurred()) - Expect(count).To(Equal(1)) - }) - }) -}) diff --git a/pkg/eth/datastore/repository.go b/pkg/eth/datastore/repository.go deleted file mode 100644 index fb629cce..00000000 --- a/pkg/eth/datastore/repository.go +++ /dev/null @@ -1,92 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package datastore - -import ( - "github.com/ethereum/go-ethereum/core/types" - "github.com/jmoiron/sqlx" - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/filters" -) - -type AddressRepository interface { - GetOrCreateAddress(address string) (int, error) -} - -type BlockRepository interface { - GetBlock(blockNumber int64) (core.Block, error) - MissingBlockNumbers(startingBlockNumber, endingBlockNumber int64, nodeID string) []int64 - SetBlocksStatus(chainHead int64) error -} - -type CheckedHeadersRepository interface { - MarkHeaderChecked(headerID int64) error - MarkHeadersUnchecked(startingBlockNumber int64) error - UncheckedHeaders(startingBlockNumber, endingBlockNumber, checkCount int64) ([]core.Header, error) -} - -type CheckedLogsRepository interface { - AlreadyWatchingLog(addresses []string, topic0 string) (bool, error) - MarkLogWatched(addresses []string, topic0 string) error -} - -type ContractRepository interface { - CreateContract(contract core.Contract) error - GetContract(contractHash string) (core.Contract, error) - ContractExists(contractHash string) (bool, error) -} - -type FilterRepository interface { - CreateFilter(filter filters.LogFilter) error - GetFilter(name string) (filters.LogFilter, error) -} - -type FullSyncLogRepository interface { - CreateLogs(logs []core.FullSyncLog, receiptID int64) error - GetLogs(address string, blockNumber int64) ([]core.FullSyncLog, error) -} - -type HeaderRepository interface { - CreateOrUpdateHeader(header core.Header) (int64, error) - CreateTransactions(headerID int64, transactions []core.TransactionModel) error - GetHeader(blockNumber int64) (core.Header, error) - MissingBlockNumbers(startingBlockNumber, endingBlockNumber int64, nodeID string) ([]int64, error) -} - -type HeaderSyncLogRepository interface { - GetUntransformedHeaderSyncLogs() ([]core.HeaderSyncLog, error) - 
CreateHeaderSyncLogs(headerID int64, logs []types.Log) error -} - -type FullSyncReceiptRepository interface { - CreateReceiptsAndLogs(blockID int64, receipts []core.Receipt) error - CreateFullSyncReceiptInTx(blockID int64, receipt core.Receipt, tx *sqlx.Tx) (int64, error) - GetFullSyncReceipt(txHash string) (core.Receipt, error) -} - -type HeaderSyncReceiptRepository interface { - CreateFullSyncReceiptInTx(blockID int64, receipt core.Receipt, tx *sqlx.Tx) (int64, error) -} - -type StorageDiffRepository interface { - CreateStorageDiff(input utils.StorageDiffInput) (int64, error) -} - -type WatchedEventRepository interface { - GetWatchedEvents(name string) ([]*core.WatchedEvent, error) -} diff --git a/pkg/eth/eth_suite_test.go b/pkg/eth/eth_suite_test.go index b6c30dfa..a2831e54 100644 --- a/pkg/eth/eth_suite_test.go +++ b/pkg/eth/eth_suite_test.go @@ -17,13 +17,19 @@ package eth_test import ( + "io/ioutil" + "testing" + . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - - "testing" + "github.com/sirupsen/logrus" ) -func TestGeth(t *testing.T) { +func TestETHSuperNode(t *testing.T) { RegisterFailHandler(Fail) - RunSpecs(t, "Eth Suite") + RunSpecs(t, "Super Node ETH Suite Test") } + +var _ = BeforeSuite(func() { + logrus.SetOutput(ioutil.Discard) +}) diff --git a/pkg/eth/fakes/checked_logs_repository.go b/pkg/eth/fakes/checked_logs_repository.go deleted file mode 100644 index 8506746a..00000000 --- a/pkg/eth/fakes/checked_logs_repository.go +++ /dev/null @@ -1,39 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fakes - -type MockCheckedLogsRepository struct { - AlreadyWatchingLogAddresses []string - AlreadyWatchingLogError error - AlreadyWatchingLogReturn bool - AlreadyWatchingLogTopicZero string - MarkLogWatchedAddresses []string - MarkLogWatchedError error - MarkLogWatchedTopicZero string -} - -func (repository *MockCheckedLogsRepository) AlreadyWatchingLog(addresses []string, topic0 string) (bool, error) { - repository.AlreadyWatchingLogAddresses = addresses - repository.AlreadyWatchingLogTopicZero = topic0 - return repository.AlreadyWatchingLogReturn, repository.AlreadyWatchingLogError -} - -func (repository *MockCheckedLogsRepository) MarkLogWatched(addresses []string, topic0 string) error { - repository.MarkLogWatchedAddresses = addresses - repository.MarkLogWatchedTopicZero = topic0 - return repository.MarkLogWatchedError -} diff --git a/pkg/eth/fakes/data.go b/pkg/eth/fakes/data.go deleted file mode 100644 index f24a95f0..00000000 --- a/pkg/eth/fakes/data.go +++ /dev/null @@ -1,119 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fakes - -import ( - "bytes" - "encoding/json" - "errors" - "math/rand" - "strconv" - "time" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -var ( - FakeAddress = common.HexToAddress("0x" + RandomString(40)) - FakeError = errors.New("failed") - FakeHash = common.BytesToHash([]byte{1, 2, 3, 4, 5}) - fakeTimestamp = rand.Int63n(1500000000) -) - -var rawFakeHeader, _ = json.Marshal(types.Header{}) -var FakeHeader = core.Header{ - Hash: FakeHash.String(), - Raw: rawFakeHeader, - Timestamp: strconv.FormatInt(fakeTimestamp, 10), -} - -func GetFakeHeader(blockNumber int64) core.Header { - return GetFakeHeaderWithTimestamp(fakeTimestamp, blockNumber) -} - -func GetFakeHeaderWithTimestamp(timestamp, blockNumber int64) core.Header { - return core.Header{ - Hash: FakeHash.String(), - BlockNumber: blockNumber, - Raw: rawFakeHeader, - Timestamp: strconv.FormatInt(timestamp, 10), - } -} - -var fakeTransaction types.Transaction -var rawTransaction bytes.Buffer -var _ = fakeTransaction.EncodeRLP(&rawTransaction) -var FakeTransaction = core.TransactionModel{ - Data: []byte{}, - From: "", - GasLimit: 0, - GasPrice: 0, - Hash: "", - Nonce: 0, - Raw: rawTransaction.Bytes(), - Receipt: core.Receipt{}, - To: "", - TxIndex: 0, - Value: "0", -} - -func GetFakeTransaction(hash string, receipt core.Receipt) core.TransactionModel { - gethTransaction := types.Transaction{} - var raw bytes.Buffer - err := gethTransaction.EncodeRLP(&raw) - if err != nil { - panic("failed to marshal transaction while creating test fake") - } - return core.TransactionModel{ - Data: []byte{}, - From: "", - GasLimit: 0, - GasPrice: 0, - Hash: hash, - Nonce: 0, - Raw: raw.Bytes(), - Receipt: receipt, - To: "", - TxIndex: 0, - Value: "0", - } -} - -func GetFakeUncle(hash, reward string) 
core.Uncle { - return core.Uncle{ - Miner: FakeAddress.String(), - Hash: hash, - Reward: reward, - Raw: rawFakeHeader, - Timestamp: strconv.FormatInt(fakeTimestamp, 10), - } -} - -func RandomString(length int) string { - var seededRand = rand.New( - rand.NewSource(time.Now().UnixNano())) - charset := "abcdef1234567890" - b := make([]byte, length) - for i := range b { - b[i] = charset[seededRand.Intn(len(charset))] - } - - return string(b) -} diff --git a/pkg/eth/fakes/mock_block_repository.go b/pkg/eth/fakes/mock_block_repository.go deleted file mode 100644 index ae93657d..00000000 --- a/pkg/eth/fakes/mock_block_repository.go +++ /dev/null @@ -1,118 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fakes - -import ( - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type MockBlockRepository struct { - createOrUpdateBlockCallCount int - createOrUpdateBlockCalled bool - createOrUpdateBlockPassedBlock core.Block - createOrUpdateBlockPassedBlockNumbers []int64 - createOrUpdateBlockReturnErr error - createOrUpdateBlockReturnInt int64 - missingBlockNumbersCalled bool - missingBlockNumbersPassedEndingBlockNumber int64 - missingBlockNumbersPassedNodeID string - missingBlockNumbersPassedStartingBlockNumber int64 - missingBlockNumbersReturnArray []int64 - setBlockStatusCalled bool - setBlockStatusPassedChainHead int64 -} - -func NewMockBlockRepository() *MockBlockRepository { - return &MockBlockRepository{ - createOrUpdateBlockCallCount: 0, - createOrUpdateBlockCalled: false, - createOrUpdateBlockPassedBlock: core.Block{}, - createOrUpdateBlockPassedBlockNumbers: nil, - createOrUpdateBlockReturnErr: nil, - createOrUpdateBlockReturnInt: 0, - missingBlockNumbersCalled: false, - missingBlockNumbersPassedEndingBlockNumber: 0, - missingBlockNumbersPassedNodeID: "", - missingBlockNumbersPassedStartingBlockNumber: 0, - missingBlockNumbersReturnArray: nil, - setBlockStatusCalled: false, - setBlockStatusPassedChainHead: 0, - } -} - -func (repository *MockBlockRepository) SetCreateOrUpdateBlockReturnVals(i int64, err error) { - repository.createOrUpdateBlockReturnInt = i - repository.createOrUpdateBlockReturnErr = err -} - -func (repository *MockBlockRepository) SetMissingBlockNumbersReturnArray(returnArray []int64) { - repository.missingBlockNumbersReturnArray = returnArray -} - -func (repository *MockBlockRepository) CreateOrUpdateBlock(block core.Block) (int64, error) { - repository.createOrUpdateBlockCallCount++ - repository.createOrUpdateBlockCalled = true - repository.createOrUpdateBlockPassedBlock = block - repository.createOrUpdateBlockPassedBlockNumbers = append(repository.createOrUpdateBlockPassedBlockNumbers, block.Number) - return 
repository.createOrUpdateBlockReturnInt, repository.createOrUpdateBlockReturnErr -} - -func (repository *MockBlockRepository) GetBlock(blockNumber int64) (core.Block, error) { - return core.Block{Number: blockNumber}, nil -} - -func (repository *MockBlockRepository) MissingBlockNumbers(startingBlockNumber int64, endingBlockNumber int64, nodeID string) []int64 { - repository.missingBlockNumbersCalled = true - repository.missingBlockNumbersPassedStartingBlockNumber = startingBlockNumber - repository.missingBlockNumbersPassedEndingBlockNumber = endingBlockNumber - repository.missingBlockNumbersPassedNodeID = nodeID - return repository.missingBlockNumbersReturnArray -} - -func (repository *MockBlockRepository) SetBlocksStatus(chainHead int64) error { - repository.setBlockStatusCalled = true - repository.setBlockStatusPassedChainHead = chainHead - return nil -} - -func (repository *MockBlockRepository) AssertCreateOrUpdateBlockCallCountEquals(times int) { - Expect(repository.createOrUpdateBlockCallCount).To(Equal(times)) -} - -func (repository *MockBlockRepository) AssertCreateOrUpdateBlocksCallCountAndBlockNumbersEquals(times int, blockNumbers []int64) { - Expect(repository.createOrUpdateBlockCallCount).To(Equal(times)) - Expect(repository.createOrUpdateBlockPassedBlockNumbers).To(Equal(blockNumbers)) -} - -func (repository *MockBlockRepository) AssertCreateOrUpdateBlockCalledWith(block core.Block) { - Expect(repository.createOrUpdateBlockCalled).To(BeTrue()) - Expect(repository.createOrUpdateBlockPassedBlock).To(Equal(block)) -} - -func (repository *MockBlockRepository) AssertMissingBlockNumbersCalledWith(startingBlockNumber int64, endingBlockNumber int64, nodeID string) { - Expect(repository.missingBlockNumbersCalled).To(BeTrue()) - Expect(repository.missingBlockNumbersPassedStartingBlockNumber).To(Equal(startingBlockNumber)) - Expect(repository.missingBlockNumbersPassedEndingBlockNumber).To(Equal(endingBlockNumber)) - 
Expect(repository.missingBlockNumbersPassedNodeID).To(Equal(nodeID)) -} - -func (repository *MockBlockRepository) AssertSetBlockStatusCalledWith(chainHead int64) { - Expect(repository.setBlockStatusCalled).To(BeTrue()) - Expect(repository.setBlockStatusPassedChainHead).To(Equal(chainHead)) -} diff --git a/pkg/eth/fakes/mock_blockchain.go b/pkg/eth/fakes/mock_blockchain.go deleted file mode 100644 index 9b2c84a5..00000000 --- a/pkg/eth/fakes/mock_blockchain.go +++ /dev/null @@ -1,158 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fakes - -import ( - "math/big" - - "github.com/ethereum/go-ethereum/common" - - "github.com/ethereum/go-ethereum" - "github.com/ethereum/go-ethereum/core/types" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type MockBlockChain struct { - fetchContractDataErr error - fetchContractDataPassedAbi string - fetchContractDataPassedAddress string - fetchContractDataPassedMethod string - fetchContractDataPassedMethodArgs []interface{} - fetchContractDataPassedResult interface{} - fetchContractDataPassedBlockNumber int64 - getBlockByNumberErr error - GetTransactionsCalled bool - GetTransactionsError error - GetTransactionsPassedHashes []common.Hash - logQuery ethereum.FilterQuery - logQueryErr error - logQueryReturnLogs []types.Log - lastBlock *big.Int - node core.Node - Transactions []core.TransactionModel - accountBalanceReturnValue *big.Int - getAccountBalanceErr error -} - -func NewMockBlockChain() *MockBlockChain { - return &MockBlockChain{ - node: core.Node{GenesisBlock: "GENESIS", NetworkID: "1", ID: "x123", ClientName: "Geth"}, - } -} - -func (chain *MockBlockChain) SetFetchContractDataErr(err error) { - chain.fetchContractDataErr = err -} - -func (chain *MockBlockChain) SetLastBlock(blockNumber *big.Int) { - chain.lastBlock = blockNumber -} - -func (chain *MockBlockChain) SetGetBlockByNumberErr(err error) { - chain.getBlockByNumberErr = err -} - -func (chain *MockBlockChain) SetGetEthLogsWithCustomQueryErr(err error) { - chain.logQueryErr = err -} - -func (chain *MockBlockChain) SetGetEthLogsWithCustomQueryReturnLogs(logs []types.Log) { - chain.logQueryReturnLogs = logs -} - -func (chain *MockBlockChain) FetchContractData(abiJSON string, address string, method string, methodArgs []interface{}, result interface{}, blockNumber int64) error { - chain.fetchContractDataPassedAbi = abiJSON - chain.fetchContractDataPassedAddress = address - chain.fetchContractDataPassedMethod = method - chain.fetchContractDataPassedMethodArgs = methodArgs - chain.fetchContractDataPassedResult = result - chain.fetchContractDataPassedBlockNumber = blockNumber - return chain.fetchContractDataErr -} - -func (chain 
*MockBlockChain) GetBlockByNumber(blockNumber int64) (core.Block, error) { - return core.Block{Number: blockNumber}, chain.getBlockByNumberErr -} - -func (chain *MockBlockChain) GetEthLogsWithCustomQuery(query ethereum.FilterQuery) ([]types.Log, error) { - chain.logQuery = query - return chain.logQueryReturnLogs, chain.logQueryErr -} - -func (chain *MockBlockChain) GetHeaderByNumber(blockNumber int64) (core.Header, error) { - return core.Header{BlockNumber: blockNumber}, nil -} - -func (chain *MockBlockChain) GetHeadersByNumbers(blockNumbers []int64) ([]core.Header, error) { - var headers []core.Header - for _, blockNumber := range blockNumbers { - var header = core.Header{BlockNumber: int64(blockNumber)} - headers = append(headers, header) - } - return headers, nil -} - -func (chain *MockBlockChain) GetFullSyncLogs(contract core.Contract, startingBlockNumber, endingBlockNumber *big.Int) ([]core.FullSyncLog, error) { - return []core.FullSyncLog{}, nil -} - -func (chain *MockBlockChain) GetTransactions(transactionHashes []common.Hash) ([]core.TransactionModel, error) { - chain.GetTransactionsCalled = true - chain.GetTransactionsPassedHashes = transactionHashes - return chain.Transactions, chain.GetTransactionsError -} - -func (chain *MockBlockChain) CallContract(contractHash string, input []byte, blockNumber *big.Int) ([]byte, error) { - return []byte{}, nil -} - -func (chain *MockBlockChain) LastBlock() (*big.Int, error) { - return chain.lastBlock, nil -} - -func (chain *MockBlockChain) Node() core.Node { - return chain.node -} - -func (chain *MockBlockChain) AssertFetchContractDataCalledWith(abiJSON string, address string, method string, methodArgs []interface{}, result interface{}, blockNumber int64) { - Expect(chain.fetchContractDataPassedAbi).To(Equal(abiJSON)) - Expect(chain.fetchContractDataPassedAddress).To(Equal(address)) - Expect(chain.fetchContractDataPassedMethod).To(Equal(method)) - if methodArgs != nil { - 
Expect(chain.fetchContractDataPassedMethodArgs).To(Equal(methodArgs)) - } - Expect(chain.fetchContractDataPassedResult).To(BeAssignableToTypeOf(result)) - Expect(chain.fetchContractDataPassedBlockNumber).To(Equal(blockNumber)) -} - -func (chain *MockBlockChain) AssertGetEthLogsWithCustomQueryCalledWith(query ethereum.FilterQuery) { - Expect(chain.logQuery).To(Equal(query)) -} - -func (chain *MockBlockChain) SetGetAccountBalanceErr(err error) { - chain.getAccountBalanceErr = err -} - -func (chain *MockBlockChain) SetGetAccountBalance(balance *big.Int) { - chain.accountBalanceReturnValue = balance -} - -func (chain *MockBlockChain) GetAccountBalance(address common.Address, blockNumber *big.Int) (*big.Int, error) { - return chain.accountBalanceReturnValue, chain.getAccountBalanceErr -} diff --git a/pkg/eth/fakes/mock_checked_headers_repository.go b/pkg/eth/fakes/mock_checked_headers_repository.go deleted file mode 100644 index fa5000af..00000000 --- a/pkg/eth/fakes/mock_checked_headers_repository.go +++ /dev/null @@ -1,52 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package fakes - -import ( - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type MockCheckedHeadersRepository struct { - MarkHeaderCheckedHeaderID int64 - MarkHeaderCheckedReturnError error - MarkHeadersUncheckedCalled bool - MarkHeadersUncheckedReturnError error - MarkHeadersUncheckedStartingBlockNumber int64 - UncheckedHeadersCheckCount int64 - UncheckedHeadersEndingBlockNumber int64 - UncheckedHeadersReturnError error - UncheckedHeadersReturnHeaders []core.Header - UncheckedHeadersStartingBlockNumber int64 -} - -func (repository *MockCheckedHeadersRepository) MarkHeadersUnchecked(startingBlockNumber int64) error { - repository.MarkHeadersUncheckedCalled = true - repository.MarkHeadersUncheckedStartingBlockNumber = startingBlockNumber - return repository.MarkHeadersUncheckedReturnError -} - -func (repository *MockCheckedHeadersRepository) MarkHeaderChecked(headerID int64) error { - repository.MarkHeaderCheckedHeaderID = headerID - return repository.MarkHeaderCheckedReturnError -} - -func (repository *MockCheckedHeadersRepository) UncheckedHeaders(startingBlockNumber, endingBlockNumber, checkCount int64) ([]core.Header, error) { - repository.UncheckedHeadersStartingBlockNumber = startingBlockNumber - repository.UncheckedHeadersEndingBlockNumber = endingBlockNumber - repository.UncheckedHeadersCheckCount = checkCount - return repository.UncheckedHeadersReturnHeaders, repository.UncheckedHeadersReturnError -} diff --git a/pkg/eth/fakes/mock_crypto_parser.go b/pkg/eth/fakes/mock_crypto_parser.go deleted file mode 100644 index d2644a24..00000000 --- a/pkg/eth/fakes/mock_crypto_parser.go +++ /dev/null @@ -1,54 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fakes - -import . "github.com/onsi/gomega" - -type MockCryptoParser struct { - parsePublicKeyCalled bool - parsePublicKeyPassedPrivateKey string - parsePublicKeyReturnString string - parsePublicKeyReturnErr error -} - -func NewMockCryptoParser() *MockCryptoParser { - return &MockCryptoParser{ - parsePublicKeyCalled: false, - parsePublicKeyPassedPrivateKey: "", - parsePublicKeyReturnString: "", - parsePublicKeyReturnErr: nil, - } -} - -func (mcp *MockCryptoParser) SetReturnVal(pubKey string) { - mcp.parsePublicKeyReturnString = pubKey -} - -func (mcp *MockCryptoParser) SetReturnErr(err error) { - mcp.parsePublicKeyReturnErr = err -} - -func (mcp *MockCryptoParser) ParsePublicKey(privateKey string) (string, error) { - mcp.parsePublicKeyCalled = true - mcp.parsePublicKeyPassedPrivateKey = privateKey - return mcp.parsePublicKeyReturnString, mcp.parsePublicKeyReturnErr -} - -func (mcp *MockCryptoParser) AssertParsePublicKeyCalledWith(privateKey string) { - Expect(mcp.parsePublicKeyCalled).To(BeTrue()) - Expect(mcp.parsePublicKeyPassedPrivateKey).To(Equal(privateKey)) -} diff --git a/pkg/eth/fakes/mock_eth_client.go b/pkg/eth/fakes/mock_eth_client.go deleted file mode 100644 index 97e668a3..00000000 --- a/pkg/eth/fakes/mock_eth_client.go +++ /dev/null @@ -1,236 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fakes - -import ( - "context" - "math/big" - - "github.com/ethereum/go-ethereum" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - . "github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/client" -) - -type MockEthClient struct { - callContractErr error - callContractPassedContext context.Context - callContractPassedMsg ethereum.CallMsg - callContractPassedNumber *big.Int - callContractReturnBytes []byte - blockByNumberErr error - blockByNumberPassedContext context.Context - blockByNumberPassedNumber *big.Int - blockByNumberReturnBlock *types.Block - headerByNumberErr error - headerByNumberPassedContext context.Context - headerByNumberPassedNumber *big.Int - headerByNumberReturnHeader *types.Header - headerByNumbersReturnHeader []*types.Header - headerByNumbersPassedNumber []*big.Int - filterLogsErr error - filterLogsPassedContext context.Context - filterLogsPassedQuery ethereum.FilterQuery - filterLogsReturnLogs []types.Log - transactionReceipts map[string]*types.Receipt - err error - passedBatch []client.BatchElem - passedMethod string - transactionSenderErr error - transactionReceiptErr error - passedAddress common.Address - passedBlockNumber *big.Int - passedBalance *big.Int - balanceAtErr error - passedbalanceAtContext context.Context -} - -func NewMockEthClient() *MockEthClient { - return &MockEthClient{ - callContractErr: nil, - callContractPassedContext: nil, - callContractPassedMsg: ethereum.CallMsg{}, - callContractPassedNumber: nil, - callContractReturnBytes: nil, - blockByNumberErr: nil, - 
blockByNumberPassedContext: nil, - blockByNumberPassedNumber: nil, - blockByNumberReturnBlock: nil, - headerByNumberErr: nil, - headerByNumberPassedContext: nil, - headerByNumberPassedNumber: nil, - headerByNumberReturnHeader: nil, - filterLogsErr: nil, - filterLogsPassedContext: nil, - filterLogsPassedQuery: ethereum.FilterQuery{}, - filterLogsReturnLogs: nil, - transactionReceipts: make(map[string]*types.Receipt), - err: nil, - passedBatch: nil, - passedMethod: "123", - } -} - -func (client *MockEthClient) SetCallContractErr(err error) { - client.callContractErr = err -} - -func (client *MockEthClient) SetCallContractReturnBytes(returnBytes []byte) { - client.callContractReturnBytes = returnBytes -} - -func (client *MockEthClient) SetBlockByNumberErr(err error) { - client.blockByNumberErr = err -} - -func (client *MockEthClient) SetBlockByNumberReturnBlock(block *types.Block) { - client.blockByNumberReturnBlock = block -} - -func (client *MockEthClient) SetHeaderByNumberErr(err error) { - client.headerByNumberErr = err -} - -func (client *MockEthClient) SetHeaderByNumberReturnHeader(header *types.Header) { - client.headerByNumberReturnHeader = header -} - -func (client *MockEthClient) SetHeaderByNumbersReturnHeader(headers []*types.Header) { - client.headerByNumbersReturnHeader = headers -} - -func (client *MockEthClient) SetFilterLogsErr(err error) { - client.filterLogsErr = err -} - -func (client *MockEthClient) SetFilterLogsReturnLogs(logs []types.Log) { - client.filterLogsReturnLogs = logs -} - -func (client *MockEthClient) SetTransactionReceiptErr(err error) { - client.transactionReceiptErr = err -} - -func (client *MockEthClient) SetTransactionReceipts(receipts []*types.Receipt) { - for _, receipt := range receipts { - client.transactionReceipts[receipt.TxHash.Hex()] = receipt - } -} - -func (client *MockEthClient) SetTransactionSenderErr(err error) { - client.transactionSenderErr = err -} - -func (client *MockEthClient) CallContract(ctx context.Context, 
msg ethereum.CallMsg, blockNumber *big.Int) ([]byte, error) { - client.callContractPassedContext = ctx - client.callContractPassedMsg = msg - client.callContractPassedNumber = blockNumber - return client.callContractReturnBytes, client.callContractErr -} - -func (client *MockEthClient) BatchCall(batch []client.BatchElem) error { - client.passedBatch = batch - client.passedMethod = batch[0].Method - - return nil -} - -func (client *MockEthClient) BlockByNumber(ctx context.Context, number *big.Int) (*types.Block, error) { - client.blockByNumberPassedContext = ctx - client.blockByNumberPassedNumber = number - return client.blockByNumberReturnBlock, client.blockByNumberErr -} - -func (client *MockEthClient) HeaderByNumber(ctx context.Context, number *big.Int) (*types.Header, error) { - client.headerByNumberPassedContext = ctx - client.headerByNumberPassedNumber = number - return client.headerByNumberReturnHeader, client.headerByNumberErr -} - -func (client *MockEthClient) HeaderByNumbers(numbers []*big.Int) ([]*types.Header, error) { - client.headerByNumbersPassedNumber = numbers - return client.headerByNumbersReturnHeader, client.headerByNumberErr -} - -func (client *MockEthClient) FilterLogs(ctx context.Context, q ethereum.FilterQuery) ([]types.Log, error) { - client.filterLogsPassedContext = ctx - client.filterLogsPassedQuery = q - return client.filterLogsReturnLogs, client.filterLogsErr -} - -func (client *MockEthClient) TransactionSender(ctx context.Context, tx *types.Transaction, block common.Hash, index uint) (common.Address, error) { - return common.HexToAddress("0x123"), client.transactionSenderErr -} - -func (client *MockEthClient) TransactionReceipt(ctx context.Context, txHash common.Hash) (*types.Receipt, error) { - if gasUsed, ok := client.transactionReceipts[txHash.Hex()]; ok { - return gasUsed, client.transactionReceiptErr - } - return &types.Receipt{GasUsed: uint64(0)}, client.transactionReceiptErr -} - -func (client *MockEthClient) 
AssertCallContractCalledWith(ctx context.Context, msg ethereum.CallMsg, blockNumber *big.Int) { - Expect(client.callContractPassedContext).To(Equal(ctx)) - Expect(client.callContractPassedMsg).To(Equal(msg)) - Expect(client.callContractPassedNumber).To(Equal(blockNumber)) -} - -func (client *MockEthClient) AssertBlockByNumberCalledWith(ctx context.Context, number *big.Int) { - Expect(client.blockByNumberPassedContext).To(Equal(ctx)) - Expect(client.blockByNumberPassedNumber).To(Equal(number)) -} - -func (client *MockEthClient) AssertHeaderByNumberCalledWith(ctx context.Context, number *big.Int) { - Expect(client.headerByNumberPassedContext).To(Equal(ctx)) - Expect(client.headerByNumberPassedNumber).To(Equal(number)) -} - -func (client *MockEthClient) AssertHeaderByNumbersCalledWith(number []*big.Int) { - Expect(client.headerByNumbersPassedNumber).To(Equal(number)) -} - -func (client *MockEthClient) AssertFilterLogsCalledWith(ctx context.Context, q ethereum.FilterQuery) { - Expect(client.filterLogsPassedContext).To(Equal(ctx)) - Expect(client.filterLogsPassedQuery).To(Equal(q)) -} - -func (client *MockEthClient) AssertBatchCalledWith(method string) { - Expect(client.passedMethod).To(Equal(method)) -} - -func (client *MockEthClient) SetBalanceAtErr(err error) { - client.balanceAtErr = err -} - -func (client *MockEthClient) SetBalanceAt(balance *big.Int) { - client.passedBalance = balance -} - -func (client *MockEthClient) BalanceAt(ctx context.Context, account common.Address, blockNumber *big.Int) (*big.Int, error) { - client.passedbalanceAtContext = ctx - client.passedAddress = account - client.passedBlockNumber = blockNumber - return client.passedBalance, client.balanceAtErr -} - -func (client *MockEthClient) AssertBalanceAtCalled(ctx context.Context, account common.Address, blockNumber *big.Int) { - Expect(client.passedbalanceAtContext).To(Equal(ctx)) - Expect(client.passedAddress).To(Equal(account)) - Expect(client.passedBlockNumber).To(Equal(blockNumber)) -} 
diff --git a/pkg/eth/fakes/mock_ethereum_database.go b/pkg/eth/fakes/mock_ethereum_database.go deleted file mode 100644 index c1424304..00000000 --- a/pkg/eth/fakes/mock_ethereum_database.go +++ /dev/null @@ -1,111 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fakes - -import ( - . "github.com/onsi/gomega" - - "github.com/ethereum/go-ethereum/core/types" -) - -type MockEthereumDatabase struct { - getBlockCalled bool - getBlockPassedHash []byte - getBlockPassedNumber int64 - getBlockReturnBlock *types.Block - getBlockHashCalled bool - getBlockHashPassedNumber int64 - getBlockHashReturnHash []byte - getBlockReceiptsCalled bool - getBlockReceiptsPassedHash []byte - getBlockReceiptsPassedNumber int64 - getBlockReceiptsReturnReceipts types.Receipts - getHeadBlockNumberCalled bool - getHeadBlockNumberReturnVal int64 -} - -func NewMockEthereumDatabase() *MockEthereumDatabase { - return &MockEthereumDatabase{ - getBlockCalled: false, - getBlockPassedHash: nil, - getBlockPassedNumber: 0, - getBlockReturnBlock: nil, - getBlockHashCalled: false, - getBlockHashPassedNumber: 0, - getBlockHashReturnHash: nil, - getBlockReceiptsCalled: false, - getBlockReceiptsPassedHash: nil, - getBlockReceiptsPassedNumber: 0, - getBlockReceiptsReturnReceipts: nil, - getHeadBlockNumberCalled: false, - getHeadBlockNumberReturnVal: 0, - } -} - 
-func (med *MockEthereumDatabase) SetReturnBlock(block *types.Block) { - med.getBlockReturnBlock = block -} - -func (med *MockEthereumDatabase) SetReturnHash(hash []byte) { - med.getBlockHashReturnHash = hash -} - -func (med *MockEthereumDatabase) SetReturnReceipts(receipts types.Receipts) { - med.getBlockReceiptsReturnReceipts = receipts -} - -func (med *MockEthereumDatabase) GetBlock(hash []byte, blockNumber int64) *types.Block { - med.getBlockCalled = true - med.getBlockPassedHash = hash - med.getBlockPassedNumber = blockNumber - return med.getBlockReturnBlock -} - -func (med *MockEthereumDatabase) GetBlockHash(blockNumber int64) []byte { - med.getBlockHashCalled = true - med.getBlockHashPassedNumber = blockNumber - return med.getBlockHashReturnHash -} - -func (med *MockEthereumDatabase) GetBlockReceipts(blockHash []byte, blockNumber int64) types.Receipts { - med.getBlockReceiptsCalled = true - med.getBlockReceiptsPassedHash = blockHash - med.getBlockReceiptsPassedNumber = blockNumber - return med.getBlockReceiptsReturnReceipts -} - -func (med *MockEthereumDatabase) GetHeadBlockNumber() int64 { - med.getHeadBlockNumberCalled = true - return med.getHeadBlockNumberReturnVal -} - -func (med *MockEthereumDatabase) AssertGetBlockCalledWith(hash []byte, blockNumber int64) { - Expect(med.getBlockCalled).To(BeTrue()) - Expect(med.getBlockPassedHash).To(Equal(hash)) - Expect(med.getBlockPassedNumber).To(Equal(blockNumber)) -} - -func (med *MockEthereumDatabase) AssertGetBlockHashCalledWith(blockNumber int64) { - Expect(med.getBlockHashCalled).To(BeTrue()) - Expect(med.getBlockHashPassedNumber).To(Equal(blockNumber)) -} - -func (med *MockEthereumDatabase) AssertGetBlockReceiptsCalledWith(blockHash []byte, blockNumber int64) { - Expect(med.getBlockReceiptsCalled).To(BeTrue()) - Expect(med.getBlockReceiptsPassedHash).To(Equal(blockHash)) - Expect(med.getBlockReceiptsPassedNumber).To(Equal(blockNumber)) -} diff --git a/pkg/eth/fakes/mock_filter_repository.go 
b/pkg/eth/fakes/mock_filter_repository.go deleted file mode 100644 index eb95a793..00000000 --- a/pkg/eth/fakes/mock_filter_repository.go +++ /dev/null @@ -1,30 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fakes - -import "github.com/vulcanize/vulcanizedb/pkg/eth/filters" - -type MockFilterRepository struct { -} - -func (*MockFilterRepository) CreateFilter(filter filters.LogFilter) error { - return nil -} - -func (*MockFilterRepository) GetFilter(name string) (filters.LogFilter, error) { - panic("implement me") -} diff --git a/pkg/eth/fakes/mock_fs_reader.go b/pkg/eth/fakes/mock_fs_reader.go deleted file mode 100644 index b1bfafda..00000000 --- a/pkg/eth/fakes/mock_fs_reader.go +++ /dev/null @@ -1,54 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fakes - -import . "github.com/onsi/gomega" - -type MockFsReader struct { - readCalled bool - readPassedPath string - readReturnBytes []byte - readReturnErr error -} - -func NewMockFsReader() *MockFsReader { - return &MockFsReader{ - readCalled: false, - readPassedPath: "", - readReturnBytes: nil, - readReturnErr: nil, - } -} - -func (mfr *MockFsReader) SetReturnBytes(returnBytes []byte) { - mfr.readReturnBytes = returnBytes -} - -func (mfr *MockFsReader) SetReturnErr(err error) { - mfr.readReturnErr = err -} - -func (mfr *MockFsReader) Read(path string) ([]byte, error) { - mfr.readCalled = true - mfr.readPassedPath = path - return mfr.readReturnBytes, mfr.readReturnErr -} - -func (mfr *MockFsReader) AssertReadCalledWith(path string) { - Expect(mfr.readCalled).To(BeTrue()) - Expect(mfr.readPassedPath).To(Equal(path)) -} diff --git a/pkg/eth/fakes/mock_full_sync_block_retriever.go b/pkg/eth/fakes/mock_full_sync_block_retriever.go deleted file mode 100644 index 9d8e9461..00000000 --- a/pkg/eth/fakes/mock_full_sync_block_retriever.go +++ /dev/null @@ -1,15 +0,0 @@ -package fakes - -type MockFullSyncBlockRetriever struct { - FirstBlock int64 - FirstBlockErr error - MostRecentBlock int64 -} - -func (retriever *MockFullSyncBlockRetriever) RetrieveFirstBlock(contractAddr string) (int64, error) { - return retriever.FirstBlock, retriever.FirstBlockErr -} - -func (retriever *MockFullSyncBlockRetriever) RetrieveMostRecentBlock() (int64, error) { - return retriever.MostRecentBlock, nil -} diff --git a/pkg/eth/fakes/mock_header_repository.go b/pkg/eth/fakes/mock_header_repository.go deleted file mode 100644 index e0002d96..00000000 --- a/pkg/eth/fakes/mock_header_repository.go +++ /dev/null @@ -1,86 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the 
terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fakes - -import ( - . "github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type MockHeaderRepository struct { - createOrUpdateHeaderCallCount int - createOrUpdateHeaderErr error - createOrUpdateHeaderPassedBlockNumbers []int64 - createOrUpdateHeaderReturnID int64 - CreateTransactionsCalled bool - CreateTransactionsError error - getHeaderError error - getHeaderReturnBlockHash string - missingBlockNumbers []int64 - headerExists bool - GetHeaderPassedBlockNumber int64 -} - -func NewMockHeaderRepository() *MockHeaderRepository { - return &MockHeaderRepository{} -} - -func (repository *MockHeaderRepository) SetCreateOrUpdateHeaderReturnID(id int64) { - repository.createOrUpdateHeaderReturnID = id -} - -func (repository *MockHeaderRepository) SetCreateOrUpdateHeaderReturnErr(err error) { - repository.createOrUpdateHeaderErr = err -} - -func (repository *MockHeaderRepository) SetMissingBlockNumbers(blockNumbers []int64) { - repository.missingBlockNumbers = blockNumbers -} - -func (repository *MockHeaderRepository) CreateOrUpdateHeader(header core.Header) (int64, error) { - repository.createOrUpdateHeaderCallCount++ - repository.createOrUpdateHeaderPassedBlockNumbers = append(repository.createOrUpdateHeaderPassedBlockNumbers, header.BlockNumber) - return repository.createOrUpdateHeaderReturnID, repository.createOrUpdateHeaderErr -} - -func (repository *MockHeaderRepository) 
CreateTransactions(headerID int64, transactions []core.TransactionModel) error { - repository.CreateTransactionsCalled = true - return repository.CreateTransactionsError -} - -func (repository *MockHeaderRepository) GetHeader(blockNumber int64) (core.Header, error) { - repository.GetHeaderPassedBlockNumber = blockNumber - return core.Header{BlockNumber: blockNumber, Hash: repository.getHeaderReturnBlockHash}, repository.getHeaderError -} - -func (repository *MockHeaderRepository) MissingBlockNumbers(startingBlockNumber, endingBlockNumber int64, nodeID string) ([]int64, error) { - return repository.missingBlockNumbers, nil -} - -func (repository *MockHeaderRepository) SetGetHeaderError(err error) { - repository.getHeaderError = err -} - -func (repository *MockHeaderRepository) SetGetHeaderReturnBlockHash(hash string) { - repository.getHeaderReturnBlockHash = hash -} - -func (repository *MockHeaderRepository) AssertCreateOrUpdateHeaderCallCountAndPassedBlockNumbers(times int, blockNumbers []int64) { - Expect(repository.createOrUpdateHeaderCallCount).To(Equal(times)) - Expect(repository.createOrUpdateHeaderPassedBlockNumbers).To(Equal(blockNumbers)) -} diff --git a/pkg/eth/fakes/mock_header_sync_block_retriever.go b/pkg/eth/fakes/mock_header_sync_block_retriever.go deleted file mode 100644 index 81f6bd6d..00000000 --- a/pkg/eth/fakes/mock_header_sync_block_retriever.go +++ /dev/null @@ -1,14 +0,0 @@ -package fakes - -type MockHeaderSyncBlockRetriever struct { - FirstBlock int64 - FirstBlockErr error -} - -func (retriever *MockHeaderSyncBlockRetriever) RetrieveFirstBlock() (int64, error) { - return retriever.FirstBlock, retriever.FirstBlockErr -} - -func (retriever *MockHeaderSyncBlockRetriever) RetrieveMostRecentBlock() (int64, error) { - return 0, nil -} diff --git a/pkg/eth/fakes/mock_header_sync_header_repository.go b/pkg/eth/fakes/mock_header_sync_header_repository.go deleted file mode 100644 index 1dc5d79f..00000000 --- 
a/pkg/eth/fakes/mock_header_sync_header_repository.go +++ /dev/null @@ -1,42 +0,0 @@ -package fakes - -import "github.com/vulcanize/vulcanizedb/pkg/eth/core" - -type MockHeaderSyncHeaderRepository struct { -} - -func (*MockHeaderSyncHeaderRepository) AddCheckColumn(id string) error { - return nil -} - -func (*MockHeaderSyncHeaderRepository) AddCheckColumns(ids []string) error { - panic("implement me") -} - -func (*MockHeaderSyncHeaderRepository) MarkHeaderChecked(headerID int64, eventID string) error { - panic("implement me") -} - -func (*MockHeaderSyncHeaderRepository) MarkHeaderCheckedForAll(headerID int64, ids []string) error { - panic("implement me") -} - -func (*MockHeaderSyncHeaderRepository) MarkHeadersCheckedForAll(headers []core.Header, ids []string) error { - panic("implement me") -} - -func (*MockHeaderSyncHeaderRepository) MissingHeaders(startingBlockNumber int64, endingBlockNumber int64, eventID string) ([]core.Header, error) { - panic("implement me") -} - -func (*MockHeaderSyncHeaderRepository) MissingMethodsCheckedEventsIntersection(startingBlockNumber, endingBlockNumber int64, methodIds, eventIds []string) ([]core.Header, error) { - panic("implement me") -} - -func (*MockHeaderSyncHeaderRepository) MissingHeadersForAll(startingBlockNumber, endingBlockNumber int64, ids []string) ([]core.Header, error) { - panic("implement me") -} - -func (*MockHeaderSyncHeaderRepository) CheckCache(key string) (interface{}, bool) { - panic("implement me") -} diff --git a/pkg/eth/fakes/mock_header_sync_log_repository.go b/pkg/eth/fakes/mock_header_sync_log_repository.go deleted file mode 100644 index 6cd7e890..00000000 --- a/pkg/eth/fakes/mock_header_sync_log_repository.go +++ /dev/null @@ -1,42 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, 
or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fakes - -import ( - "github.com/ethereum/go-ethereum/core/types" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type MockHeaderSyncLogRepository struct { - CreateError error - GetCalled bool - GetError error - PassedHeaderID int64 - PassedLogs []types.Log - ReturnLogs []core.HeaderSyncLog -} - -func (repository *MockHeaderSyncLogRepository) GetUntransformedHeaderSyncLogs() ([]core.HeaderSyncLog, error) { - repository.GetCalled = true - return repository.ReturnLogs, repository.GetError -} - -func (repository *MockHeaderSyncLogRepository) CreateHeaderSyncLogs(headerID int64, logs []types.Log) error { - repository.PassedHeaderID = headerID - repository.PassedLogs = logs - return repository.CreateError -} diff --git a/pkg/eth/fakes/mock_level_database_reader.go b/pkg/eth/fakes/mock_level_database_reader.go deleted file mode 100644 index 78f99987..00000000 --- a/pkg/eth/fakes/mock_level_database_reader.go +++ /dev/null @@ -1,142 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fakes - -import ( - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - . "github.com/onsi/gomega" -) - -type MockLevelDatabaseReader struct { - getBlockCalled bool - getBlockNumberCalled bool - getBlockNumberPassedHash common.Hash - getBlockPassedHash common.Hash - getBlockPassedNumber uint64 - getBlockReceiptsCalled bool - getBlockReceiptsPassedHash common.Hash - getBlockReceiptsPassedNumber uint64 - getCanonicalHashCalled bool - getCanonicalHashPassedNumber uint64 - getCanonicalHashReturnHash common.Hash - getHeadBlockHashCalled bool - getHeadBlockHashReturnHash common.Hash - passedHash common.Hash - returnBlock *types.Block - returnBlockNumber uint64 - returnReceipts types.Receipts -} - -func NewMockLevelDatabaseReader() *MockLevelDatabaseReader { - return &MockLevelDatabaseReader{ - getBlockCalled: false, - getBlockNumberCalled: false, - getBlockNumberPassedHash: common.Hash{}, - getBlockPassedHash: common.Hash{}, - getBlockPassedNumber: 0, - getBlockReceiptsCalled: false, - getBlockReceiptsPassedHash: common.Hash{}, - getBlockReceiptsPassedNumber: 0, - getCanonicalHashCalled: false, - getCanonicalHashPassedNumber: 0, - getCanonicalHashReturnHash: common.Hash{}, - getHeadBlockHashCalled: false, - getHeadBlockHashReturnHash: common.Hash{}, - passedHash: common.Hash{}, - returnBlock: nil, - returnBlockNumber: 0, - returnReceipts: nil, - } -} - -func (mldr *MockLevelDatabaseReader) SetReturnBlock(block *types.Block) { - mldr.returnBlock = block -} - -func (mldr *MockLevelDatabaseReader) SetReturnBlockNumber(n uint64) { - mldr.returnBlockNumber = n -} - -func (mldr *MockLevelDatabaseReader) SetGetCanonicalHashReturnHash(hash common.Hash) { - mldr.getCanonicalHashReturnHash = hash -} - -func (mldr *MockLevelDatabaseReader) SetHeadBlockHashReturnHash(hash common.Hash) { - 
mldr.getHeadBlockHashReturnHash = hash -} - -func (mldr *MockLevelDatabaseReader) SetReturnReceipts(receipts types.Receipts) { - mldr.returnReceipts = receipts -} - -func (mldr *MockLevelDatabaseReader) GetBlock(hash common.Hash, number uint64) *types.Block { - mldr.getBlockCalled = true - mldr.getBlockPassedHash = hash - mldr.getBlockPassedNumber = number - return mldr.returnBlock -} - -func (mldr *MockLevelDatabaseReader) GetBlockReceipts(hash common.Hash, number uint64) types.Receipts { - mldr.getBlockReceiptsCalled = true - mldr.getBlockReceiptsPassedHash = hash - mldr.getBlockReceiptsPassedNumber = number - return mldr.returnReceipts -} - -func (mldr *MockLevelDatabaseReader) GetBlockNumber(hash common.Hash) *uint64 { - mldr.getBlockNumberCalled = true - mldr.getBlockNumberPassedHash = hash - return &mldr.returnBlockNumber -} - -func (mldr *MockLevelDatabaseReader) GetCanonicalHash(number uint64) common.Hash { - mldr.getCanonicalHashCalled = true - mldr.getCanonicalHashPassedNumber = number - return mldr.getCanonicalHashReturnHash -} - -func (mldr *MockLevelDatabaseReader) GetHeadBlockHash() common.Hash { - mldr.getHeadBlockHashCalled = true - return mldr.getHeadBlockHashReturnHash -} - -func (mldr *MockLevelDatabaseReader) AssertGetBlockCalledWith(hash common.Hash, number uint64) { - Expect(mldr.getBlockCalled).To(BeTrue()) - Expect(mldr.getBlockPassedHash).To(Equal(hash)) - Expect(mldr.getBlockPassedNumber).To(Equal(number)) -} - -func (mldr *MockLevelDatabaseReader) AssertGetBlockNumberCalledWith(hash common.Hash) { - Expect(mldr.getBlockNumberCalled).To(BeTrue()) - Expect(mldr.getBlockNumberPassedHash).To(Equal(hash)) -} - -func (mldr *MockLevelDatabaseReader) AssertGetBlockReceiptsCalledWith(hash common.Hash, number uint64) { - Expect(mldr.getBlockReceiptsCalled).To(BeTrue()) - Expect(mldr.getBlockReceiptsPassedHash).To(Equal(hash)) - Expect(mldr.getBlockReceiptsPassedNumber).To(Equal(number)) -} - -func (mldr *MockLevelDatabaseReader) 
AssertGetCanonicalHashCalledWith(number uint64) { - Expect(mldr.getCanonicalHashCalled).To(BeTrue()) - Expect(mldr.getCanonicalHashPassedNumber).To(Equal(number)) -} - -func (mldr *MockLevelDatabaseReader) AssertGetHeadBlockHashCalled() { - Expect(mldr.getHeadBlockHashCalled).To(BeTrue()) -} diff --git a/pkg/eth/fakes/mock_parser.go b/pkg/eth/fakes/mock_parser.go deleted file mode 100644 index 6a8ef9a9..00000000 --- a/pkg/eth/fakes/mock_parser.go +++ /dev/null @@ -1,57 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package fakes - -import ( - "github.com/ethereum/go-ethereum/accounts/abi" - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/types" -) - -type MockParser struct { - AbiToReturn string - EventName string - Event types.Event -} - -func (*MockParser) Parse(contractAddr string) error { - return nil -} - -func (parser *MockParser) ParseAbiStr(abiStr string) error { - parser.AbiToReturn = abiStr - return nil -} - -func (parser *MockParser) Abi() string { - return parser.AbiToReturn -} - -func (*MockParser) ParsedAbi() abi.ABI { - return abi.ABI{} -} - -func (*MockParser) GetMethods(wanted []string) []types.Method { - panic("implement me") -} - -func (*MockParser) GetSelectMethods(wanted []string) []types.Method { - return []types.Method{} -} - -func (parser *MockParser) GetEvents(wanted []string) map[string]types.Event { - return map[string]types.Event{parser.EventName: parser.Event} -} diff --git a/pkg/eth/fakes/mock_poller.go b/pkg/eth/fakes/mock_poller.go deleted file mode 100644 index 3814ce5b..00000000 --- a/pkg/eth/fakes/mock_poller.go +++ /dev/null @@ -1,40 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package fakes - -import ( - "github.com/vulcanize/vulcanizedb/pkg/eth/contract_watcher/shared/contract" -) - -type MockPoller struct { - ContractName string -} - -func (*MockPoller) PollContract(con contract.Contract, lastBlock int64) error { - panic("implement me") -} - -func (*MockPoller) PollContractAt(con contract.Contract, blockNumber int64) error { - panic("implement me") -} - -func (poller *MockPoller) FetchContractData(contractAbi, contractAddress, method string, methodArgs []interface{}, result interface{}, blockNumber int64) error { - if p, ok := result.(*string); ok { - *p = poller.ContractName - } - return nil -} diff --git a/pkg/eth/fakes/mock_receipt_repository.go b/pkg/eth/fakes/mock_receipt_repository.go deleted file mode 100644 index 1a9e5209..00000000 --- a/pkg/eth/fakes/mock_receipt_repository.go +++ /dev/null @@ -1,69 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fakes - -import ( - "github.com/jmoiron/sqlx" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type MockReceiptRepository struct { - createReceiptsAndLogsCalled bool - createReceiptsAndLogsPassedBlockID int64 - createReceiptsAndLogsPassedReceipts []core.Receipt - createReceiptsAndLogsReturnErr error -} - -func NewMockReceiptRepository() *MockReceiptRepository { - return &MockReceiptRepository{ - createReceiptsAndLogsCalled: false, - createReceiptsAndLogsPassedBlockID: 0, - createReceiptsAndLogsPassedReceipts: nil, - createReceiptsAndLogsReturnErr: nil, - } -} - -func (mrr *MockReceiptRepository) SetCreateReceiptsAndLogsReturnErr(err error) { - mrr.createReceiptsAndLogsReturnErr = err -} - -func (mrr *MockReceiptRepository) CreateReceiptsAndLogs(blockID int64, receipts []core.Receipt) error { - mrr.createReceiptsAndLogsCalled = true - mrr.createReceiptsAndLogsPassedBlockID = blockID - mrr.createReceiptsAndLogsPassedReceipts = receipts - return mrr.createReceiptsAndLogsReturnErr -} - -func (mrr *MockReceiptRepository) CreateFullSyncReceiptInTx(blockID int64, receipt core.Receipt, tx *sqlx.Tx) (int64, error) { - panic("implement me") -} - -func (mrr *MockReceiptRepository) GetFullSyncReceipt(txHash string) (core.Receipt, error) { - panic("implement me") -} - -func (mrr *MockReceiptRepository) AssertCreateReceiptsAndLogsCalledWith(blockID int64, receipts []core.Receipt) { - Expect(mrr.createReceiptsAndLogsCalled).To(BeTrue()) - Expect(mrr.createReceiptsAndLogsPassedBlockID).To(Equal(blockID)) - Expect(mrr.createReceiptsAndLogsPassedReceipts).To(Equal(receipts)) -} - -func (mrr *MockReceiptRepository) AssertCreateReceiptsAndLogsNotCalled() { - Expect(mrr.createReceiptsAndLogsCalled).To(BeFalse()) -} diff --git a/pkg/eth/fakes/mock_rpc_client.go b/pkg/eth/fakes/mock_rpc_client.go deleted file mode 100644 index 0827b87e..00000000 --- a/pkg/eth/fakes/mock_rpc_client.go +++ /dev/null @@ -1,182 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: 
you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fakes - -import ( - "context" - "math/big" - - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/p2p" - "github.com/ethereum/go-ethereum/rpc" - . "github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/client" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type MockRPCClient struct { - callContextErr error - ipcPath string - nodeType core.NodeType - passedContext context.Context - passedMethod string - passedResult interface{} - passedBatch []client.BatchElem - passedNamespace string - passedPayloadChan interface{} - passedSubscribeArgs []interface{} - lengthOfBatch int - returnPOAHeader core.POAHeader - returnPOAHeaders []core.POAHeader - returnPOWHeaders []*types.Header - supportedModules map[string]string -} - -func (client *MockRPCClient) Subscribe(namespace string, payloadChan interface{}, args ...interface{}) (*rpc.ClientSubscription, error) { - client.passedNamespace = namespace - client.passedPayloadChan = payloadChan - - for _, arg := range args { - client.passedSubscribeArgs = append(client.passedSubscribeArgs, arg) - } - - subscription := rpc.ClientSubscription{} - return &subscription, nil -} - -func (client *MockRPCClient) AssertSubscribeCalledWith(namespace string, payloadChan interface{}, args []interface{}) { - Expect(client.passedNamespace).To(Equal(namespace)) - 
Expect(client.passedPayloadChan).To(Equal(payloadChan)) - Expect(client.passedSubscribeArgs).To(Equal(args)) -} - -func NewMockRPCClient() *MockRPCClient { - return &MockRPCClient{} -} - -func (client *MockRPCClient) SetIpcPath(ipcPath string) { - client.ipcPath = ipcPath -} - -func (client *MockRPCClient) BatchCall(batch []client.BatchElem) error { - client.passedBatch = batch - client.passedMethod = batch[0].Method - client.lengthOfBatch = len(batch) - - for _, batchElem := range batch { - client.passedContext = context.Background() - client.passedResult = &batchElem.Result - client.passedMethod = batchElem.Method - if p, ok := batchElem.Result.(*types.Header); ok { - *p = types.Header{Number: big.NewInt(100)} - } - if p, ok := batchElem.Result.(*core.POAHeader); ok { - - *p = client.returnPOAHeader - } - } - - return nil -} - -func (client *MockRPCClient) CallContext(ctx context.Context, result interface{}, method string, args ...interface{}) error { - client.passedContext = ctx - client.passedResult = result - client.passedMethod = method - switch method { - case "admin_nodeInfo": - if p, ok := result.(*p2p.NodeInfo); ok { - p.ID = "enode://GethNode@172.17.0.1:30303" - p.Name = "Geth/v1.7" - } - case "eth_getBlockByNumber": - if p, ok := result.(*types.Header); ok { - *p = types.Header{Number: big.NewInt(100)} - } - if p, ok := result.(*core.POAHeader); ok { - - *p = client.returnPOAHeader - } - if client.callContextErr != nil { - return client.callContextErr - } - case "parity_versionInfo": - if p, ok := result.(*core.ParityNodeInfo); ok { - *p = core.ParityNodeInfo{ - Track: "", - ParityVersion: core.ParityVersion{ - Major: 1, - Minor: 2, - Patch: 3, - }, - Hash: "", - } - } - case "parity_enode": - if p, ok := result.(*string); ok { - *p = "enode://ParityNode@172.17.0.1:30303" - } - case "net_version": - if p, ok := result.(*string); ok { - *p = "1234" - } - } - return nil -} - -func (client *MockRPCClient) IpcPath() string { - return client.ipcPath -} - 
-func (client *MockRPCClient) SupportedModules() (map[string]string, error) { - return client.supportedModules, nil -} - -func (client *MockRPCClient) SetSupporedModules(supportedModules map[string]string) { - client.supportedModules = supportedModules -} - -func (client *MockRPCClient) SetCallContextErr(err error) { - client.callContextErr = err -} - -func (client *MockRPCClient) SetReturnPOAHeader(header core.POAHeader) { - client.returnPOAHeader = header -} - -func (client *MockRPCClient) SetReturnPOWHeaders(headers []*types.Header) { - client.returnPOWHeaders = headers -} - -func (client *MockRPCClient) SetReturnPOAHeaders(headers []core.POAHeader) { - client.returnPOAHeaders = headers -} - -func (client *MockRPCClient) AssertCallContextCalledWith(ctx context.Context, result interface{}, method string) { - Expect(client.passedContext).To(Equal(ctx)) - Expect(client.passedResult).To(BeAssignableToTypeOf(result)) - Expect(client.passedMethod).To(Equal(method)) -} - -func (client *MockRPCClient) AssertBatchCalledWith(method string, lengthOfBatch int) { - Expect(client.lengthOfBatch).To(Equal(lengthOfBatch)) - for _, batch := range client.passedBatch { - Expect(batch.Method).To(Equal(method)) - } - Expect(client.passedMethod).To(Equal(method)) -} diff --git a/pkg/eth/fakes/mock_storage_diff_repository.go b/pkg/eth/fakes/mock_storage_diff_repository.go deleted file mode 100644 index 642286aa..00000000 --- a/pkg/eth/fakes/mock_storage_diff_repository.go +++ /dev/null @@ -1,32 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fakes - -import ( - "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" -) - -type MockStorageDiffRepository struct { - CreatePassedInputs []utils.StorageDiffInput - CreateReturnID int64 - CreateReturnError error -} - -func (repository *MockStorageDiffRepository) CreateStorageDiff(input utils.StorageDiffInput) (int64, error) { - repository.CreatePassedInputs = append(repository.CreatePassedInputs, input) - return repository.CreateReturnID, repository.CreateReturnError -} diff --git a/pkg/eth/fakes/mock_tailer.go b/pkg/eth/fakes/mock_tailer.go deleted file mode 100644 index e30fb922..00000000 --- a/pkg/eth/fakes/mock_tailer.go +++ /dev/null @@ -1,43 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package fakes - -import ( - "github.com/hpcloud/tail" - "gopkg.in/tomb.v1" -) - -type MockTailer struct { - Lines chan *tail.Line - TailErr error -} - -func NewMockTailer() *MockTailer { - return &MockTailer{ - Lines: make(chan *tail.Line, 1), - } -} - -func (mock *MockTailer) Tail() (*tail.Tail, error) { - fakeTail := &tail.Tail{ - Filename: "", - Lines: mock.Lines, - Config: tail.Config{}, - Tomb: tomb.Tomb{}, - } - return fakeTail, mock.TailErr -} diff --git a/pkg/eth/fakes/mock_transaction_converter.go b/pkg/eth/fakes/mock_transaction_converter.go deleted file mode 100644 index f7b47020..00000000 --- a/pkg/eth/fakes/mock_transaction_converter.go +++ /dev/null @@ -1,47 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package fakes - -import ( - "github.com/ethereum/go-ethereum/core/types" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type MockTransactionConverter struct { - ConvertHeaderTransactionIndexToIntCalled bool - ConvertBlockTransactionsToCoreCalled bool - ConvertBlockTransactionsToCorePassedBlock *types.Block -} - -func NewMockTransactionConverter() *MockTransactionConverter { - return &MockTransactionConverter{ - ConvertHeaderTransactionIndexToIntCalled: false, - ConvertBlockTransactionsToCoreCalled: false, - ConvertBlockTransactionsToCorePassedBlock: nil, - } -} - -func (converter *MockTransactionConverter) ConvertBlockTransactionsToCore(gethBlock *types.Block) ([]core.TransactionModel, error) { - converter.ConvertBlockTransactionsToCoreCalled = true - converter.ConvertBlockTransactionsToCorePassedBlock = gethBlock - return []core.TransactionModel{}, nil -} - -func (converter *MockTransactionConverter) ConvertRPCTransactionsToModels(transactions []core.RPCTransaction) ([]core.TransactionModel, error) { - converter.ConvertHeaderTransactionIndexToIntCalled = true - return nil, nil -} diff --git a/pkg/eth/fakes/mock_transaction_syncer.go b/pkg/eth/fakes/mock_transaction_syncer.go deleted file mode 100644 index e5b49a9c..00000000 --- a/pkg/eth/fakes/mock_transaction_syncer.go +++ /dev/null @@ -1,29 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fakes - -import "github.com/ethereum/go-ethereum/core/types" - -type MockTransactionSyncer struct { - SyncTransactionsCalled bool - SyncTransactionsError error -} - -func (syncer *MockTransactionSyncer) SyncTransactions(headerID int64, logs []types.Log) error { - syncer.SyncTransactionsCalled = true - return syncer.SyncTransactionsError -} diff --git a/pkg/super_node/eth/filterer.go b/pkg/eth/filterer.go similarity index 98% rename from pkg/super_node/eth/filterer.go rename to pkg/eth/filterer.go index 0407ac7c..748526db 100644 --- a/pkg/super_node/eth/filterer.go +++ b/pkg/eth/filterer.go @@ -20,17 +20,16 @@ import ( "bytes" "fmt" - "github.com/ethereum/go-ethereum/statediff" - "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/crypto" "github.com/ethereum/go-ethereum/rlp" + "github.com/ethereum/go-ethereum/statediff" "github.com/multiformats/go-multihash" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // ResponseFilterer satisfies the ResponseFilterer interface for ethereum diff --git a/pkg/super_node/eth/filterer_test.go b/pkg/eth/filterer_test.go similarity index 97% rename from pkg/super_node/eth/filterer_test.go rename to pkg/eth/filterer_test.go index 82e40bc0..b7fc64cc 100644 --- a/pkg/super_node/eth/filterer_test.go +++ b/pkg/eth/filterer_test.go @@ -23,10 +23,10 @@ import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth/mocks" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) var ( diff --git a/pkg/eth/filters/filter_query.go b/pkg/eth/filters/filter_query.go deleted file mode 100644 index 38ce8d96..00000000 --- a/pkg/eth/filters/filter_query.go +++ /dev/null @@ -1,80 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package filters - -import ( - "encoding/json" - - "errors" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type LogFilters []LogFilter - -type LogFilter struct { - Name string `json:"name"` - FromBlock int64 `json:"fromBlock" db:"from_block"` - ToBlock int64 `json:"toBlock" db:"to_block"` - Address string `json:"address"` - core.Topics `json:"topics"` -} - -func (filterQuery *LogFilter) UnmarshalJSON(input []byte) error { - type Alias LogFilter - - var err error - aux := &struct { - ToBlock string `json:"toBlock"` - FromBlock string `json:"fromBlock"` - *Alias - }{ - Alias: (*Alias)(filterQuery), - } - if err = json.Unmarshal(input, &aux); err != nil { - return err - } - if filterQuery.Name == "" { - return errors.New("filters: must provide name for logfilter") - } - filterQuery.ToBlock, err = filterQuery.unmarshalFromToBlock(aux.ToBlock) - if err != nil { - return errors.New("filters: invalid fromBlock") - } - filterQuery.FromBlock, err = filterQuery.unmarshalFromToBlock(aux.FromBlock) - if err != nil { - return errors.New("filters: invalid fromBlock") - } - if !common.IsHexAddress(filterQuery.Address) { - return errors.New("filters: invalid address") - } - - return nil -} - -func (filterQuery *LogFilter) unmarshalFromToBlock(auxBlock string) (int64, error) { - if auxBlock == "" { - return -1, nil - } - block, err := hexutil.DecodeUint64(auxBlock) - if err != nil { - return 0, errors.New("filters: invalid block arg") - } - return int64(block), nil -} diff --git a/pkg/eth/filters/filter_test.go b/pkg/eth/filters/filter_test.go deleted file mode 100644 index 32c5a90c..00000000 --- a/pkg/eth/filters/filter_test.go +++ /dev/null @@ -1,141 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free 
Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package filters_test - -import ( - "encoding/json" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/filters" -) - -var _ = Describe("Log filters", func() { - It("decodes web3 filter to LogFilter", func() { - - var logFilter filters.LogFilter - jsonFilter := []byte( - `{ - "name": "TestEvent", - "fromBlock": "0x1", - "toBlock": "0x488290", - "address": "0x8888f1f195afa192cfee860698584c030f4c9db1", - "topics": ["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null, "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null] - }`) - err := json.Unmarshal(jsonFilter, &logFilter) - - Expect(err).ToNot(HaveOccurred()) - Expect(logFilter.Name).To(Equal("TestEvent")) - Expect(logFilter.FromBlock).To(Equal(int64(1))) - Expect(logFilter.ToBlock).To(Equal(int64(4752016))) - Expect(logFilter.Address).To(Equal("0x8888f1f195afa192cfee860698584c030f4c9db1")) - Expect(logFilter.Topics).To(Equal( - core.Topics{ - "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", - "", - "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", - ""})) - }) - - It("decodes array of web3 filters to []LogFilter", func() { - - logFilters := make([]filters.LogFilter, 0) - jsonFilter := []byte( - `[{ - "name": "TestEvent", - "fromBlock": "0x1", - "toBlock": "0x488290", - "address": "0x8888f1f195afa192cfee860698584c030f4c9db1", - "topics": 
["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null, "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null] - }, - { - "name": "TestEvent2", - "fromBlock": "0x3", - "toBlock": "0x4", - "address": "0xd26114cd6EE289AccF82350c8d8487fedB8A0C07", - "topics": ["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef", "0x0000000000000000000000006b0949d4c6edfe467db78241b7d5566f3c2bb43e", "0x0000000000000000000000005e44c3e467a49c9ca0296a9f130fc433041aaa28"] - }]`) - err := json.Unmarshal(jsonFilter, &logFilters) - - Expect(err).ToNot(HaveOccurred()) - Expect(len(logFilters)).To(Equal(2)) - Expect(logFilters[0].Name).To(Equal("TestEvent")) - Expect(logFilters[1].Name).To(Equal("TestEvent2")) - }) - - It("requires valid ethereum address", func() { - - var logFilter filters.LogFilter - jsonFilter := []byte( - `{ - "name": "TestEvent", - "fromBlock": "0x1", - "toBlock": "0x2", - "address": "0x8888f1f195afa192cf84c030f4c9db1", - "topics": ["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null, "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null] - }`) - err := json.Unmarshal(jsonFilter, &logFilter) - Expect(err).To(HaveOccurred()) - - }) - It("requires name", func() { - - var logFilter filters.LogFilter - jsonFilter := []byte( - `{ - "fromBlock": "0x1", - "toBlock": "0x2", - "address": "0x8888f1f195afa192cfee860698584c030f4c9db1", - "topics": ["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null, "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null] - }`) - err := json.Unmarshal(jsonFilter, &logFilter) - Expect(err).To(HaveOccurred()) - - }) - - It("maps missing fromBlock to -1", func() { - - var logFilter filters.LogFilter - jsonFilter := []byte( - `{ - "name": "TestEvent", - "toBlock": "0x2", - "address": "0x8888f1f195afa192cfee860698584c030f4c9db1", - "topics": ["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", 
null, "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null] - }`) - err := json.Unmarshal(jsonFilter, &logFilter) - Expect(err).ToNot(HaveOccurred()) - Expect(logFilter.FromBlock).To(Equal(int64(-1))) - - }) - - It("maps missing toBlock to -1", func() { - var logFilter filters.LogFilter - jsonFilter := []byte( - `{ - "name": "TestEvent", - "address": "0x8888f1f195afa192cfee860698584c030f4c9db1", - "topics": ["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null, "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null] - }`) - err := json.Unmarshal(jsonFilter, &logFilter) - Expect(err).ToNot(HaveOccurred()) - Expect(logFilter.ToBlock).To(Equal(int64(-1))) - - }) - -}) diff --git a/pkg/eth/filters/query_builder_suite_test.go b/pkg/eth/filters/query_builder_suite_test.go deleted file mode 100644 index 189a4314..00000000 --- a/pkg/eth/filters/query_builder_suite_test.go +++ /dev/null @@ -1,29 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package filters_test - -import ( - "testing" - - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" -) - -func TestQueryBuilder(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "QueryBuilder Suite") -} diff --git a/pkg/super_node/eth/helpers.go b/pkg/eth/helpers.go similarity index 100% rename from pkg/super_node/eth/helpers.go rename to pkg/eth/helpers.go diff --git a/pkg/eth/history/header_validator.go b/pkg/eth/history/header_validator.go deleted file mode 100644 index f68aa80d..00000000 --- a/pkg/eth/history/header_validator.go +++ /dev/null @@ -1,52 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package history - -import ( - "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore" -) - -type HeaderValidator struct { - blockChain core.BlockChain - headerRepository datastore.HeaderRepository - windowSize int -} - -func NewHeaderValidator(blockChain core.BlockChain, repository datastore.HeaderRepository, windowSize int) HeaderValidator { - return HeaderValidator{ - blockChain: blockChain, - headerRepository: repository, - windowSize: windowSize, - } -} - -func (validator HeaderValidator) ValidateHeaders() (ValidationWindow, error) { - window, err := MakeValidationWindow(validator.blockChain, validator.windowSize) - if err != nil { - logrus.Error("ValidateHeaders: error creating validation window: ", err) - return ValidationWindow{}, err - } - blockNumbers := MakeRange(window.LowerBound, window.UpperBound) - _, err = RetrieveAndUpdateHeaders(validator.blockChain, validator.headerRepository, blockNumbers) - if err != nil { - logrus.Error("ValidateHeaders: error getting/updating headers: ", err) - return ValidationWindow{}, err - } - return window, nil -} diff --git a/pkg/eth/history/header_validator_test.go b/pkg/eth/history/header_validator_test.go deleted file mode 100644 index 32035b06..00000000 --- a/pkg/eth/history/header_validator_test.go +++ /dev/null @@ -1,61 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package history_test - -import ( - "errors" - "math/big" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" - "github.com/vulcanize/vulcanizedb/pkg/eth/history" -) - -var _ = Describe("Header validator", func() { - var ( - headerRepository *fakes.MockHeaderRepository - blockChain *fakes.MockBlockChain - ) - - BeforeEach(func() { - headerRepository = fakes.NewMockHeaderRepository() - blockChain = fakes.NewMockBlockChain() - }) - - It("attempts to create every header in the validation window", func() { - headerRepository.SetMissingBlockNumbers([]int64{}) - blockChain.SetLastBlock(big.NewInt(3)) - validator := history.NewHeaderValidator(blockChain, headerRepository, 2) - - _, err := validator.ValidateHeaders() - Expect(err).NotTo(HaveOccurred()) - - headerRepository.AssertCreateOrUpdateHeaderCallCountAndPassedBlockNumbers(3, []int64{1, 2, 3}) - }) - - It("propagates header repository errors", func() { - blockChain.SetLastBlock(big.NewInt(3)) - headerRepositoryError := errors.New("CreateOrUpdate") - headerRepository.SetCreateOrUpdateHeaderReturnErr(headerRepositoryError) - validator := history.NewHeaderValidator(blockChain, headerRepository, 2) - - _, err := validator.ValidateHeaders() - Expect(err).To(MatchError(headerRepositoryError)) - }) -}) diff --git a/pkg/eth/history/history_suite_test.go b/pkg/eth/history/history_suite_test.go deleted file mode 100644 index f903b0cd..00000000 --- a/pkg/eth/history/history_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package history_test - -import ( - "io/ioutil" - "testing" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - log "github.com/sirupsen/logrus" -) - -func init() { - log.SetOutput(ioutil.Discard) -} - -func TestHistory(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "History Suite") -} diff --git a/pkg/eth/history/populate_headers.go b/pkg/eth/history/populate_headers.go deleted file mode 100644 index 1114cc20..00000000 --- a/pkg/eth/history/populate_headers.go +++ /dev/null @@ -1,68 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package history - -import ( - "fmt" - "github.com/sirupsen/logrus" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore" - "github.com/vulcanize/vulcanizedb/pkg/eth/datastore/postgres/repositories" -) - -func PopulateMissingHeaders(blockChain core.BlockChain, headerRepository datastore.HeaderRepository, startingBlockNumber int64) (int, error) { - lastBlock, err := blockChain.LastBlock() - if err != nil { - logrus.Error("PopulateMissingHeaders: Error getting last block: ", err) - return 0, err - } - - blockNumbers, err := headerRepository.MissingBlockNumbers(startingBlockNumber, lastBlock.Int64(), blockChain.Node().ID) - if err != nil { - logrus.Error("PopulateMissingHeaders: Error getting missing block numbers: ", err) - return 0, err - } else if len(blockNumbers) == 0 { - return 0, nil - } - - logrus.Debug(getBlockRangeString(blockNumbers)) - _, err = RetrieveAndUpdateHeaders(blockChain, headerRepository, blockNumbers) - if err != nil { - logrus.Error("PopulateMissingHeaders: Error getting/updating headers: ", err) - return 0, err - } - return len(blockNumbers), nil -} - -func RetrieveAndUpdateHeaders(blockChain core.BlockChain, headerRepository datastore.HeaderRepository, blockNumbers []int64) (int, error) { - headers, err := blockChain.GetHeadersByNumbers(blockNumbers) - for _, header := range headers { - _, err = headerRepository.CreateOrUpdateHeader(header) - if err != nil { - if err == repositories.ErrValidHeaderExists { - continue - } - return 0, err - } - } - return len(blockNumbers), nil -} - -func getBlockRangeString(blockRange []int64) string { - return fmt.Sprintf("Backfilling |%v| blocks", len(blockRange)) -} diff --git a/pkg/eth/history/populate_headers_test.go b/pkg/eth/history/populate_headers_test.go deleted file mode 100644 index 018a45df..00000000 --- a/pkg/eth/history/populate_headers_test.go +++ /dev/null @@ -1,67 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is 
free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package history_test - -import ( - "math/big" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" - "github.com/vulcanize/vulcanizedb/pkg/eth/history" -) - -var _ = Describe("Populating headers", func() { - - var headerRepository *fakes.MockHeaderRepository - - BeforeEach(func() { - headerRepository = fakes.NewMockHeaderRepository() - }) - - It("returns number of headers added", func() { - blockChain := fakes.NewMockBlockChain() - blockChain.SetLastBlock(big.NewInt(2)) - headerRepository.SetMissingBlockNumbers([]int64{2}) - - headersAdded, err := history.PopulateMissingHeaders(blockChain, headerRepository, 1) - - Expect(err).NotTo(HaveOccurred()) - Expect(headersAdded).To(Equal(1)) - }) - - It("adds missing headers to the db", func() { - blockChain := fakes.NewMockBlockChain() - blockChain.SetLastBlock(big.NewInt(2)) - headerRepository.SetMissingBlockNumbers([]int64{2}) - - _, err := history.PopulateMissingHeaders(blockChain, headerRepository, 1) - - Expect(err).NotTo(HaveOccurred()) - headerRepository.AssertCreateOrUpdateHeaderCallCountAndPassedBlockNumbers(1, []int64{2}) - }) - - It("returns early if the db is already synced up to the head of the chain", func() { - blockChain := fakes.NewMockBlockChain() - blockChain.SetLastBlock(big.NewInt(2)) - headersAdded, err := 
history.PopulateMissingHeaders(blockChain, headerRepository, 2) - - Expect(err).NotTo(HaveOccurred()) - Expect(headersAdded).To(Equal(0)) - }) -}) diff --git a/pkg/eth/history/validation_window.go b/pkg/eth/history/validation_window.go deleted file mode 100644 index d9e46a1a..00000000 --- a/pkg/eth/history/validation_window.go +++ /dev/null @@ -1,57 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package history - -import ( - "fmt" - - log "github.com/sirupsen/logrus" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type ValidationWindow struct { - LowerBound int64 - UpperBound int64 -} - -func (window ValidationWindow) Size() int { - return int(window.UpperBound - window.LowerBound) -} - -func MakeValidationWindow(blockchain core.BlockChain, windowSize int) (ValidationWindow, error) { - upperBound, err := blockchain.LastBlock() - if err != nil { - log.Error("MakeValidationWindow: error getting LastBlock: ", err) - return ValidationWindow{}, err - } - lowerBound := upperBound.Int64() - int64(windowSize) - return ValidationWindow{lowerBound, upperBound.Int64()}, nil -} - -func MakeRange(min, max int64) []int64 { - a := make([]int64, max-min+1) - for i := range a { - a[i] = min + int64(i) - } - return a -} - -func (window ValidationWindow) GetString() string { - return fmt.Sprintf("Validating Blocks |%v|-- Validation Window --|%v|", - window.LowerBound, window.UpperBound) -} diff --git a/pkg/eth/history/validation_window_test.go b/pkg/eth/history/validation_window_test.go deleted file mode 100644 index baeaa491..00000000 --- a/pkg/eth/history/validation_window_test.go +++ /dev/null @@ -1,53 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package history_test - -import ( - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "math/big" - - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" - "github.com/vulcanize/vulcanizedb/pkg/eth/history" -) - -var _ = Describe("Validation window", func() { - It("creates a ValidationWindow equal to (HEAD-windowSize, HEAD)", func() { - blockChain := fakes.NewMockBlockChain() - blockChain.SetLastBlock(big.NewInt(5)) - - validationWindow, err := history.MakeValidationWindow(blockChain, 2) - - Expect(err).NotTo(HaveOccurred()) - Expect(validationWindow.LowerBound).To(Equal(int64(3))) - Expect(validationWindow.UpperBound).To(Equal(int64(5))) - }) - - It("returns the window size", func() { - window := history.ValidationWindow{LowerBound: 1, UpperBound: 3} - - Expect(window.Size()).To(Equal(2)) - }) - - It("generates a range of int64s", func() { - numberOfBlocksCreated := history.MakeRange(0, 5) - expected := []int64{0, 1, 2, 3, 4, 5} - - Expect(numberOfBlocksCreated).To(Equal(expected)) - }) -}) diff --git a/pkg/super_node/eth/indexer.go b/pkg/eth/indexer.go similarity index 98% rename from pkg/super_node/eth/indexer.go rename to pkg/eth/indexer.go index a35231d5..1bf3b44f 100644 --- a/pkg/super_node/eth/indexer.go +++ b/pkg/eth/indexer.go @@ -23,8 +23,8 @@ import ( "github.com/jmoiron/sqlx" log "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) var ( diff --git a/pkg/super_node/eth/indexer_test.go b/pkg/eth/indexer_test.go similarity index 95% rename from pkg/super_node/eth/indexer_test.go rename to pkg/eth/indexer_test.go index 471a3c2b..1540ce19 100644 --- a/pkg/super_node/eth/indexer_test.go +++ b/pkg/eth/indexer_test.go @@ -21,10 +21,10 @@ import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth/mocks" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) var _ = Describe("Indexer", func() { diff --git a/pkg/super_node/eth/ipld_fetcher.go b/pkg/eth/ipld_fetcher.go similarity index 98% rename from pkg/super_node/eth/ipld_fetcher.go rename to pkg/eth/ipld_fetcher.go index ae3b5f8c..d0d35d9f 100644 --- a/pkg/super_node/eth/ipld_fetcher.go +++ b/pkg/eth/ipld_fetcher.go @@ -28,8 +28,8 @@ import ( "github.com/ipfs/go-cid" log "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) var ( diff --git a/pkg/super_node/eth/ipld_fetcher_test.go b/pkg/eth/ipld_fetcher_test.go similarity index 97% rename from pkg/super_node/eth/ipld_fetcher_test.go rename to pkg/eth/ipld_fetcher_test.go index d7bc9b73..8710214e 100644 --- a/pkg/super_node/eth/ipld_fetcher_test.go +++ b/pkg/eth/ipld_fetcher_test.go @@ -26,9 +26,9 @@ import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/mocks" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/mocks" ) var ( diff --git a/pkg/super_node/eth/ipld_pg_fetcher.go b/pkg/eth/ipld_pg_fetcher.go similarity index 97% rename from pkg/super_node/eth/ipld_pg_fetcher.go rename to pkg/eth/ipld_pg_fetcher.go index 61fb53db..2c65b851 100644 --- a/pkg/super_node/eth/ipld_pg_fetcher.go +++ b/pkg/eth/ipld_pg_fetcher.go @@ -25,9 +25,9 @@ import ( "github.com/jmoiron/sqlx" log "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // IPLDPGFetcher satisfies the IPLDFetcher interface for ethereum diff --git a/pkg/super_node/eth/ipld_pg_fetcher_test.go b/pkg/eth/ipld_pg_fetcher_test.go similarity index 90% rename from pkg/super_node/eth/ipld_pg_fetcher_test.go rename to pkg/eth/ipld_pg_fetcher_test.go index d99643e2..5a5b2bff 100644 --- a/pkg/super_node/eth/ipld_pg_fetcher_test.go +++ b/pkg/eth/ipld_pg_fetcher_test.go @@ -20,10 +20,10 @@ import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth/mocks" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) var ( diff --git a/libraries/shared/mocks/batch_client.go b/pkg/eth/mocks/batch_client.go similarity index 98% rename from libraries/shared/mocks/batch_client.go rename to pkg/eth/mocks/batch_client.go index 4ad2d12b..f9f85bc5 100644 --- a/libraries/shared/mocks/batch_client.go +++ b/pkg/eth/mocks/batch_client.go @@ -24,7 +24,7 @@ import ( "github.com/ethereum/go-ethereum/rpc" "github.com/ethereum/go-ethereum/statediff" - "github.com/vulcanize/vulcanizedb/pkg/eth/client" + "github.com/vulcanize/ipfs-chain-watcher/pkg/client" ) // BackFillerClient is a mock client for use in backfiller tests diff --git a/pkg/super_node/eth/mocks/converter.go b/pkg/eth/mocks/converter.go similarity index 95% rename from pkg/super_node/eth/mocks/converter.go rename to pkg/eth/mocks/converter.go index eea84595..6ec6cc7d 100644 --- a/pkg/super_node/eth/mocks/converter.go +++ b/pkg/eth/mocks/converter.go @@ -19,11 +19,10 @@ package mocks import ( "fmt" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" - "github.com/ethereum/go-ethereum/statediff" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // PayloadConverter is the underlying struct for the Converter interface diff --git a/pkg/super_node/eth/mocks/indexer.go b/pkg/eth/mocks/indexer.go similarity index 91% rename from pkg/super_node/eth/mocks/indexer.go rename to pkg/eth/mocks/indexer.go index c05cac79..99ab2e0a 100644 --- 
a/pkg/super_node/eth/mocks/indexer.go +++ b/pkg/eth/mocks/indexer.go @@ -19,9 +19,9 @@ package mocks import ( "fmt" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" ) // CIDIndexer is the underlying struct for the Indexer interface diff --git a/pkg/super_node/eth/mocks/publisher.go b/pkg/eth/mocks/publisher.go similarity index 95% rename from pkg/super_node/eth/mocks/publisher.go rename to pkg/eth/mocks/publisher.go index a33e1211..0c09689f 100644 --- a/pkg/super_node/eth/mocks/publisher.go +++ b/pkg/eth/mocks/publisher.go @@ -19,9 +19,9 @@ package mocks import ( "fmt" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" ) // IPLDPublisher is the underlying struct for the Publisher interface diff --git a/libraries/shared/mocks/stream_client.go b/pkg/eth/mocks/stream_client.go similarity index 100% rename from libraries/shared/mocks/stream_client.go rename to pkg/eth/mocks/stream_client.go diff --git a/pkg/super_node/eth/mocks/test_data.go b/pkg/eth/mocks/test_data.go similarity index 97% rename from pkg/super_node/eth/mocks/test_data.go rename to pkg/eth/mocks/test_data.go index 4c1eea14..10c7e750 100644 --- a/pkg/super_node/eth/mocks/test_data.go +++ b/pkg/eth/mocks/test_data.go @@ -34,10 +34,9 @@ import ( "github.com/multiformats/go-multihash" log "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - eth2 "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + 
"github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" ) // Test variables @@ -338,7 +337,7 @@ var ( } MockCIDPayload = ð.CIDPayload{ - HeaderCID: eth2.HeaderModel{ + HeaderCID: eth.HeaderModel{ BlockHash: MockBlock.Hash().String(), BlockNumber: MockBlock.Number().String(), CID: HeaderCID.String(), @@ -352,7 +351,7 @@ var ( Bloom: MockBlock.Bloom().Bytes(), Timestamp: MockBlock.Time(), }, - UncleCIDs: []eth2.UncleModel{}, + UncleCIDs: []eth.UncleModel{}, TransactionCIDs: MockTrxMetaPostPublsh, ReceiptCIDs: map[common.Hash]eth.ReceiptModel{ MockTransactions[0].Hash(): MockRctMetaPostPublish[0], @@ -388,7 +387,7 @@ var ( MockCIDWrapper = ð.CIDWrapper{ BlockNumber: big.NewInt(1), - Header: eth2.HeaderModel{ + Header: eth.HeaderModel{ BlockNumber: "1", BlockHash: MockBlock.Hash().String(), ParentHash: "0x0000000000000000000000000000000000000000000000000000000000000000", @@ -405,7 +404,7 @@ var ( }, Transactions: MockTrxMetaPostPublsh, Receipts: MockRctMetaPostPublish, - Uncles: []eth2.UncleModel{}, + Uncles: []eth.UncleModel{}, StateNodes: MockStateMetaPostPublish, StorageNodes: []eth.StorageNodeWithStateKeyModel{ { @@ -463,7 +462,7 @@ var ( CID: Rct3IPLD.Cid().String(), }, }, - StateNodes: []eth2.StateNode{ + StateNodes: []eth.StateNode{ { StateLeafKey: common.BytesToHash(ContractLeafKey), Type: statediff.Leaf, @@ -483,7 +482,7 @@ var ( Path: []byte{'\x0c'}, }, }, - StorageNodes: []eth2.StorageNode{ + StorageNodes: []eth.StorageNode{ { StateLeafKey: common.BytesToHash(ContractLeafKey), StorageLeafKey: common.BytesToHash(StorageLeafKey), diff --git a/pkg/super_node/eth/models.go b/pkg/eth/models.go similarity index 100% rename from pkg/super_node/eth/models.go rename to pkg/eth/models.go diff --git a/pkg/eth/node/node.go b/pkg/eth/node/node.go deleted file mode 100644 index e918d2a5..00000000 --- a/pkg/eth/node/node.go +++ /dev/null @@ -1,168 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or 
modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package node - -import ( - "context" - "fmt" - "regexp" - "strconv" - "strings" - - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/p2p" - log "github.com/sirupsen/logrus" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" -) - -type IPropertiesReader interface { - NodeInfo() (id string, name string) - NetworkID() float64 - GenesisBlock() string -} - -type PropertiesReader struct { - client core.RPCClient -} - -type ParityClient struct { - PropertiesReader -} - -type GethClient struct { - PropertiesReader -} - -type InfuraClient struct { - PropertiesReader -} - -type GanacheClient struct { - PropertiesReader -} - -func MakeNode(rpcClient core.RPCClient) core.Node { - pr := makePropertiesReader(rpcClient) - id, name := pr.NodeInfo() - return core.Node{ - GenesisBlock: pr.GenesisBlock(), - NetworkID: fmt.Sprintf("%f", pr.NetworkID()), - ID: id, - ClientName: name, - } -} - -func makePropertiesReader(client core.RPCClient) IPropertiesReader { - switch getNodeType(client) { - case core.GETH: - return GethClient{PropertiesReader: PropertiesReader{client: client}} - case core.PARITY: - return ParityClient{PropertiesReader: PropertiesReader{client: client}} - case core.INFURA: - return InfuraClient{PropertiesReader: PropertiesReader{client: client}} - case core.GANACHE: - return GanacheClient{PropertiesReader: PropertiesReader{client: client}} - default: - return 
PropertiesReader{client: client} - } -} - -func getNodeType(client core.RPCClient) core.NodeType { - // TODO: fix this - // This heuristics for figuring out the node type are not usefull... - // for example we often port forward remote nodes to localhost - // and geth does not have to expose the admin api... - if strings.Contains(client.IpcPath(), "infura") { - return core.INFURA - } - if strings.Contains(client.IpcPath(), "127.0.0.1") || strings.Contains(client.IpcPath(), "localhost") { - return core.GANACHE - } - modules, _ := client.SupportedModules() - if _, ok := modules["admin"]; ok { - return core.GETH - } - return core.PARITY -} - -func (reader PropertiesReader) NetworkID() float64 { - var version string - err := reader.client.CallContext(context.Background(), &version, "net_version") - if err != nil { - log.Error(err) - } - networkID, _ := strconv.ParseFloat(version, 64) - return networkID -} - -func (reader PropertiesReader) GenesisBlock() string { - var header *types.Header - blockZero := "0x0" - includeTransactions := false - err := reader.client.CallContext(context.Background(), &header, "eth_getBlockByNumber", blockZero, includeTransactions) - if err != nil { - log.Error(err) - } - return header.Hash().Hex() -} - -func (reader PropertiesReader) NodeInfo() (string, string) { - var info p2p.NodeInfo - err := reader.client.CallContext(context.Background(), &info, "admin_nodeInfo") - if err != nil { - log.Error(err) - } - return info.ID, info.Name -} - -func (client ParityClient) NodeInfo() (string, string) { - nodeInfo := client.parityNodeInfo() - id := client.parityID() - return id, nodeInfo -} - -func (client InfuraClient) NodeInfo() (string, string) { - return "infura", "infura" -} - -func (client GanacheClient) NodeInfo() (string, string) { - return "ganache", "ganache" -} - -func (client ParityClient) parityNodeInfo() string { - var nodeInfo core.ParityNodeInfo - err := client.client.CallContext(context.Background(), &nodeInfo, "parity_versionInfo") 
- if err != nil { - log.Error(err) - } - return nodeInfo.String() -} - -func (client ParityClient) parityID() string { - var enodeID = regexp.MustCompile(`^enode://(.+)@.+$`) - var enodeURL string - err := client.client.CallContext(context.Background(), &enodeURL, "parity_enode") - if err != nil { - log.Error(err) - } - enode := enodeID.FindStringSubmatch(enodeURL) - if len(enode) < 2 { - return "" - } - return enode[1] -} diff --git a/pkg/eth/node/node_suite_test.go b/pkg/eth/node/node_suite_test.go deleted file mode 100644 index 38ca2516..00000000 --- a/pkg/eth/node/node_suite_test.go +++ /dev/null @@ -1,29 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package node_test - -import ( - "testing" - - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" -) - -func TestNode(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Node Suite") -} diff --git a/pkg/eth/node/node_test.go b/pkg/eth/node/node_test.go deleted file mode 100644 index 3bee1fad..00000000 --- a/pkg/eth/node/node_test.go +++ /dev/null @@ -1,119 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package node_test - -import ( - "encoding/json" - - . "github.com/onsi/ginkgo" - . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/eth/fakes" - "github.com/vulcanize/vulcanizedb/pkg/eth/node" -) - -var EmpytHeaderHash = "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347" - -var _ = Describe("Node Info", func() { - Describe("Parity Node Info", func() { - It("verifies parity_versionInfo can be unmarshalled into ParityNodeInfo", func() { - var parityNodeInfo core.ParityNodeInfo - nodeInfoJSON := []byte( - `{ - "hash": "0x2ae8b4ca278dd7b896090366615fef81cbbbc0e0", - "track": "null", - "version": { - "major": 1, - "minor": 6, - "patch": 0 - } - }`) - json.Unmarshal(nodeInfoJSON, &parityNodeInfo) - Expect(parityNodeInfo.Hash).To(Equal("0x2ae8b4ca278dd7b896090366615fef81cbbbc0e0")) - Expect(parityNodeInfo.Track).To(Equal("null")) - Expect(parityNodeInfo.Major).To(Equal(1)) - Expect(parityNodeInfo.Minor).To(Equal(6)) - Expect(parityNodeInfo.Patch).To(Equal(0)) - }) - - It("Creates client string", func() { - parityNodeInfo := core.ParityNodeInfo{ - Track: "null", - ParityVersion: core.ParityVersion{ - Major: 1, - Minor: 6, - Patch: 0, - }, - Hash: "0x1232144j", - } - Expect(parityNodeInfo.String()).To(Equal("Parity/v1.6.0/")) - }) - - It("returns parity ID and client name for parity node", func() { - client := fakes.NewMockRPCClient() - - n := node.MakeNode(client) - Expect(n.ID).To(Equal("ParityNode")) - Expect(n.ClientName).To(Equal("Parity/v1.2.3/")) - }) - }) - - It("returns the genesis block for any client", func() { - client := fakes.NewMockRPCClient() - n := node.MakeNode(client) - Expect(n.GenesisBlock).To(Equal(EmpytHeaderHash)) - }) - - It("returns the network id for any client", func() { - client := fakes.NewMockRPCClient() - n := node.MakeNode(client) - Expect(n.NetworkID).To(Equal("1234.000000")) - }) - - It("returns geth ID and client name for geth node", func() { - client := fakes.NewMockRPCClient() - supportedModules := make(map[string]string) - 
supportedModules["admin"] = "ok" - client.SetSupporedModules(supportedModules) - - n := node.MakeNode(client) - Expect(n.ID).To(Equal("enode://GethNode@172.17.0.1:30303")) - Expect(n.ClientName).To(Equal("Geth/v1.7")) - }) - - It("returns infura ID and client name for infura node", func() { - client := fakes.NewMockRPCClient() - client.SetIpcPath("infura/path") - n := node.MakeNode(client) - Expect(n.ID).To(Equal("infura")) - Expect(n.ClientName).To(Equal("infura")) - }) - - It("returns local id and client name for Local node", func() { - client := fakes.NewMockRPCClient() - client.SetIpcPath("127.0.0.1") - n := node.MakeNode(client) - Expect(n.ID).To(Equal("ganache")) - Expect(n.ClientName).To(Equal("ganache")) - - client.SetIpcPath("localhost") - n = node.MakeNode(client) - Expect(n.ID).To(Equal("ganache")) - Expect(n.ClientName).To(Equal("ganache")) - }) -}) diff --git a/pkg/super_node/eth/payload_fetcher.go b/pkg/eth/payload_fetcher.go similarity index 98% rename from pkg/super_node/eth/payload_fetcher.go rename to pkg/eth/payload_fetcher.go index f771ab46..64e36476 100644 --- a/pkg/super_node/eth/payload_fetcher.go +++ b/pkg/eth/payload_fetcher.go @@ -24,7 +24,7 @@ import ( "github.com/ethereum/go-ethereum/rpc" "github.com/ethereum/go-ethereum/statediff" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // BatchClient is an interface to a batch-fetching geth rpc client; created to allow mock insertion diff --git a/pkg/super_node/eth/payload_fetcher_test.go b/pkg/eth/payload_fetcher_test.go similarity index 74% rename from pkg/super_node/eth/payload_fetcher_test.go rename to pkg/eth/payload_fetcher_test.go index 572c706a..12a18481 100644 --- a/pkg/super_node/eth/payload_fetcher_test.go +++ b/pkg/eth/payload_fetcher_test.go @@ -23,9 +23,8 @@ import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/mocks" - "github.com/vulcanize/vulcanizedb/libraries/shared/test_data" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth/mocks" ) var _ = Describe("StateDiffFetcher", func() { @@ -33,19 +32,24 @@ var _ = Describe("StateDiffFetcher", func() { var ( mc *mocks.BackFillerClient stateDiffFetcher *eth.PayloadFetcher + payload2 statediff.Payload + blockNumber2 uint64 ) BeforeEach(func() { mc = new(mocks.BackFillerClient) - err := mc.SetReturnDiffAt(test_data.BlockNumber.Uint64(), test_data.MockStatediffPayload) + err := mc.SetReturnDiffAt(mocks.BlockNumber.Uint64(), mocks.MockStateDiffPayload) Expect(err).ToNot(HaveOccurred()) - err = mc.SetReturnDiffAt(test_data.BlockNumber2.Uint64(), test_data.MockStatediffPayload2) + payload2 = mocks.MockStateDiffPayload + payload2.BlockRlp = []byte{} + blockNumber2 = mocks.BlockNumber.Uint64() + 1 + err = mc.SetReturnDiffAt(blockNumber2, payload2) Expect(err).ToNot(HaveOccurred()) stateDiffFetcher = eth.NewPayloadFetcher(mc, time.Second*60) }) It("Batch calls statediff_stateDiffAt", func() { blockHeights := []uint64{ - test_data.BlockNumber.Uint64(), - test_data.BlockNumber2.Uint64(), + mocks.BlockNumber.Uint64(), + blockNumber2, } stateDiffPayloads, err := stateDiffFetcher.FetchAt(blockHeights) Expect(err).ToNot(HaveOccurred()) @@ -54,8 +58,8 @@ var _ = Describe("StateDiffFetcher", func() { Expect(ok).To(BeTrue()) payload2, ok := stateDiffPayloads[1].(statediff.Payload) Expect(ok).To(BeTrue()) - Expect(payload1).To(Equal(test_data.MockStatediffPayload)) - Expect(payload2).To(Equal(test_data.MockStatediffPayload2)) + Expect(payload1).To(Equal(mocks.MockStateDiffPayload)) + Expect(payload2).To(Equal(payload2)) }) }) }) diff --git a/pkg/super_node/eth/publish_and_indexer.go b/pkg/eth/publish_and_indexer.go similarity index 93% rename from 
pkg/super_node/eth/publish_and_indexer.go rename to pkg/eth/publish_and_indexer.go index 54f844a5..9384ee80 100644 --- a/pkg/super_node/eth/publish_and_indexer.go +++ b/pkg/eth/publish_and_indexer.go @@ -26,10 +26,9 @@ import ( "github.com/jmoiron/sqlx" "github.com/multiformats/go-multihash" - common2 "github.com/vulcanize/vulcanizedb/pkg/eth/converters/common" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // IPLDPublisherAndIndexer satisfies the IPLDPublisher interface for ethereum @@ -90,7 +89,7 @@ func (pub *IPLDPublisherAndIndexer) Publish(payload shared.ConvertedData) (share if err := shared.PublishIPLD(tx, headerNode); err != nil { return nil, err } - reward := common2.CalcEthBlockReward(ipldPayload.Block.Header(), ipldPayload.Block.Uncles(), ipldPayload.Block.Transactions(), ipldPayload.Receipts) + reward := CalcEthBlockReward(ipldPayload.Block.Header(), ipldPayload.Block.Uncles(), ipldPayload.Block.Transactions(), ipldPayload.Receipts) header := HeaderModel{ CID: headerNode.Cid().String(), ParentHash: ipldPayload.Block.ParentHash().String(), @@ -115,7 +114,7 @@ func (pub *IPLDPublisherAndIndexer) Publish(payload shared.ConvertedData) (share if err := shared.PublishIPLD(tx, uncleNode); err != nil { return nil, err } - uncleReward := common2.CalcUncleMinerReward(ipldPayload.Block.Number().Int64(), uncleNode.Number.Int64()) + uncleReward := CalcUncleMinerReward(ipldPayload.Block.Number().Int64(), uncleNode.Number.Int64()) uncle := UncleModel{ CID: uncleNode.Cid().String(), ParentHash: uncleNode.ParentHash.String(), diff --git a/pkg/super_node/eth/publish_and_indexer_test.go b/pkg/eth/publish_and_indexer_test.go similarity index 97% rename from 
pkg/super_node/eth/publish_and_indexer_test.go rename to pkg/eth/publish_and_indexer_test.go index 775b2bb6..2252ede4 100644 --- a/pkg/super_node/eth/publish_and_indexer_test.go +++ b/pkg/eth/publish_and_indexer_test.go @@ -23,10 +23,11 @@ import ( "github.com/ipfs/go-ipfs-ds-help" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth/mocks" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) var _ = Describe("PublishAndIndexer", func() { diff --git a/pkg/super_node/eth/publisher.go b/pkg/eth/publisher.go similarity index 94% rename from pkg/super_node/eth/publisher.go rename to pkg/eth/publisher.go index a4d839b1..74aa114d 100644 --- a/pkg/super_node/eth/publisher.go +++ b/pkg/eth/publisher.go @@ -25,11 +25,10 @@ import ( "github.com/ethereum/go-ethereum/rlp" "github.com/ethereum/go-ethereum/statediff" - common2 "github.com/vulcanize/vulcanizedb/pkg/eth/converters/common" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/dag_putters" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/dag_putters" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // IPLDPublisher satisfies the IPLDPublisher for ethereum @@ -77,7 +76,7 @@ func (pub *IPLDPublisher) Publish(payload shared.ConvertedData) (shared.CIDsForI if err != nil { return nil, err } - reward := common2.CalcEthBlockReward(ipldPayload.Block.Header(), 
ipldPayload.Block.Uncles(), ipldPayload.Block.Transactions(), ipldPayload.Receipts) + reward := CalcEthBlockReward(ipldPayload.Block.Header(), ipldPayload.Block.Uncles(), ipldPayload.Block.Transactions(), ipldPayload.Receipts) header := HeaderModel{ CID: headerCid, ParentHash: ipldPayload.Block.ParentHash().String(), @@ -100,7 +99,7 @@ func (pub *IPLDPublisher) Publish(payload shared.ConvertedData) (shared.CIDsForI if err != nil { return nil, err } - uncleReward := common2.CalcUncleMinerReward(ipldPayload.Block.Number().Int64(), uncle.Number.Int64()) + uncleReward := CalcUncleMinerReward(ipldPayload.Block.Number().Int64(), uncle.Number.Int64()) uncleCids[i] = UncleModel{ CID: uncleCid, ParentHash: uncle.ParentHash.String(), diff --git a/pkg/super_node/eth/publisher_test.go b/pkg/eth/publisher_test.go similarity index 96% rename from pkg/super_node/eth/publisher_test.go rename to pkg/eth/publisher_test.go index ca0aa3d6..343ab971 100644 --- a/pkg/super_node/eth/publisher_test.go +++ b/pkg/eth/publisher_test.go @@ -21,9 +21,9 @@ import ( . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - mocks2 "github.com/vulcanize/vulcanizedb/pkg/ipfs/mocks" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth/mocks" + mocks2 "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/mocks" ) var ( diff --git a/pkg/eth/converters/common/block_rewards.go b/pkg/eth/reward.go similarity index 63% rename from pkg/eth/converters/common/block_rewards.go rename to pkg/eth/reward.go index 114c0c92..3949933d 100644 --- a/pkg/eth/converters/common/block_rewards.go +++ b/pkg/eth/reward.go @@ -14,68 +14,14 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see . 
-package common +package eth import ( "math/big" "github.com/ethereum/go-ethereum/core/types" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" ) -func CalcBlockReward(block core.Block, uncles []*types.Header) *big.Int { - staticBlockReward := staticRewardByBlockNumber(block.Number) - transactionFees := calcTransactionFees(block) - uncleInclusionRewards := calcUncleInclusionRewards(block, uncles) - tmp := transactionFees.Add(transactionFees, uncleInclusionRewards) - return tmp.Add(tmp, staticBlockReward) -} - -func calcUncleMinerReward(blockNumber, uncleBlockNumber int64) *big.Int { - staticBlockReward := staticRewardByBlockNumber(blockNumber) - rewardDiv8 := staticBlockReward.Div(staticBlockReward, big.NewInt(8)) - mainBlock := big.NewInt(blockNumber) - uncleBlock := big.NewInt(uncleBlockNumber) - uncleBlockPlus8 := uncleBlock.Add(uncleBlock, big.NewInt(8)) - uncleBlockPlus8MinusMainBlock := uncleBlockPlus8.Sub(uncleBlockPlus8, mainBlock) - return rewardDiv8.Mul(rewardDiv8, uncleBlockPlus8MinusMainBlock) -} - -func calcTransactionFees(block core.Block) *big.Int { - transactionFees := new(big.Int) - for _, transaction := range block.Transactions { - receipt := transaction.Receipt - gasPrice := big.NewInt(transaction.GasPrice) - gasUsed := big.NewInt(int64(receipt.GasUsed)) - transactionFee := gasPrice.Mul(gasPrice, gasUsed) - transactionFees = transactionFees.Add(transactionFees, transactionFee) - } - return transactionFees -} - -func calcUncleInclusionRewards(block core.Block, uncles []*types.Header) *big.Int { - uncleInclusionRewards := new(big.Int) - for range uncles { - staticBlockReward := staticRewardByBlockNumber(block.Number) - staticBlockReward.Div(staticBlockReward, big.NewInt(32)) - uncleInclusionRewards.Add(uncleInclusionRewards, staticBlockReward) - } - return uncleInclusionRewards -} - -func staticRewardByBlockNumber(blockNumber int64) *big.Int { - staticBlockReward := new(big.Int) - //https://blog.ethereum.org/2017/10/12/byzantium-hf-announcement/ - 
if blockNumber >= 7280000 { - staticBlockReward.SetString("2000000000000000000", 10) - } else if blockNumber >= 4370000 { - staticBlockReward.SetString("3000000000000000000", 10) - } else { - staticBlockReward.SetString("5000000000000000000", 10) - } - return staticBlockReward -} - func CalcEthBlockReward(header *types.Header, uncles []*types.Header, txs types.Transactions, receipts types.Receipts) *big.Int { staticBlockReward := staticRewardByBlockNumber(header.Number.Int64()) transactionFees := calcEthTransactionFees(txs, receipts) @@ -94,6 +40,19 @@ func CalcUncleMinerReward(blockNumber, uncleBlockNumber int64) *big.Int { return rewardDiv8.Mul(rewardDiv8, uncleBlockPlus8MinusMainBlock) } +func staticRewardByBlockNumber(blockNumber int64) *big.Int { + staticBlockReward := new(big.Int) + //https://blog.ethereum.org/2017/10/12/byzantium-hf-announcement/ + if blockNumber >= 7280000 { + staticBlockReward.SetString("2000000000000000000", 10) + } else if blockNumber >= 4370000 { + staticBlockReward.SetString("3000000000000000000", 10) + } else { + staticBlockReward.SetString("5000000000000000000", 10) + } + return staticBlockReward +} + func calcEthTransactionFees(txs types.Transactions, receipts types.Receipts) *big.Int { transactionFees := new(big.Int) for i, transaction := range txs { diff --git a/pkg/super_node/eth/streamer.go b/pkg/eth/streamer.go similarity index 97% rename from pkg/super_node/eth/streamer.go rename to pkg/eth/streamer.go index 704b15d7..24a967ee 100644 --- a/pkg/super_node/eth/streamer.go +++ b/pkg/eth/streamer.go @@ -23,7 +23,7 @@ import ( "github.com/ethereum/go-ethereum/statediff" "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) const ( diff --git a/pkg/super_node/eth/streamer_test.go b/pkg/eth/streamer_test.go similarity index 85% rename from pkg/super_node/eth/streamer_test.go rename to pkg/eth/streamer_test.go index d6c014f6..acbe4dbf 100644 --- 
a/pkg/super_node/eth/streamer_test.go +++ b/pkg/eth/streamer_test.go @@ -18,9 +18,9 @@ import ( . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/mocks" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) var _ = Describe("StateDiff Streamer", func() { diff --git a/pkg/super_node/eth/subscription_config.go b/pkg/eth/subscription_config.go similarity index 99% rename from pkg/super_node/eth/subscription_config.go rename to pkg/eth/subscription_config.go index d8ac70d0..de873f1b 100644 --- a/pkg/super_node/eth/subscription_config.go +++ b/pkg/eth/subscription_config.go @@ -21,7 +21,7 @@ import ( "github.com/spf13/viper" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // SubscriptionSettings config is used by a subscriber to specify what eth data to stream from the super node diff --git a/pkg/super_node/eth/test_helpers.go b/pkg/eth/test_helpers.go similarity index 97% rename from pkg/super_node/eth/test_helpers.go rename to pkg/eth/test_helpers.go index 333600c9..011c88bd 100644 --- a/pkg/super_node/eth/test_helpers.go +++ b/pkg/eth/test_helpers.go @@ -19,7 +19,7 @@ package eth import ( . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" ) // TearDownDB is used to tear down the super node dbs after tests diff --git a/pkg/eth/testing/helpers.go b/pkg/eth/testing/helpers.go deleted file mode 100644 index eb3f9c4d..00000000 --- a/pkg/eth/testing/helpers.go +++ /dev/null @@ -1,39 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package testing - -import ( - "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/eth" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/test_config" -) - -func SampleContract() core.Contract { - return core.Contract{ - Abi: sampleAbiFileContents(), - Hash: "0xd26114cd6EE289AccF82350c8d8487fedB8A0C07", - } -} - -func sampleAbiFileContents() string { - abiFileContents, err := eth.ReadAbiFile(test_config.ABIFilePath + "sample_abi.json") - if err != nil { - logrus.Fatal(err) - } - return abiFileContents -} diff --git a/pkg/eth/testing/invalid_abi.json b/pkg/eth/testing/invalid_abi.json deleted file mode 100644 index 2a260b84..00000000 --- a/pkg/eth/testing/invalid_abi.json +++ /dev/null @@ -1 +0,0 @@ -bad json \ No newline at end of file diff --git a/pkg/eth/testing/sample_abi.json b/pkg/eth/testing/sample_abi.json deleted file mode 100644 index a3582a41..00000000 --- a/pkg/eth/testing/sample_abi.json +++ /dev/null @@ -1 +0,0 @@ -[{"constant":true,"inputs":[],"name":"mintingFinished","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"name","outputs":[{"name":"","type":"string"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_spender","type":"address"},{"name":"_value","type":"uint256"}],"name":"approve","outputs":[],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_from","type":"address"},{"name":"_to","type":"address"},{"name":"_value","type":"uint256"}],"name":"transferFrom","outputs":[],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint256"}],"payable":false,"type":"function"},{"constant":false,"inputs":[],"name":"unpause","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":false,"inp
uts":[{"name":"_to","type":"address"},{"name":"_amount","type":"uint256"}],"name":"mint","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"paused","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"_owner","type":"address"}],"name":"balanceOf","outputs":[{"name":"balance","type":"uint256"}],"payable":false,"type":"function"},{"constant":false,"inputs":[],"name":"finishMinting","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":false,"inputs":[],"name":"pause","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"owner","outputs":[{"name":"","type":"address"}],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"string"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_to","type":"address"},{"name":"_value","type":"uint256"}],"name":"transfer","outputs":[],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_to","type":"address"},{"name":"_amount","type":"uint256"},{"name":"_releaseTime","type":"uint256"}],"name":"mintTimelocked","outputs":[{"name":"","type":"address"}],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"_owner","type":"address"},{"name":"_spender","type":"address"}],"name":"allowance","outputs":[{"name":"remaining","type":"uint256"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"newOwner","type":"address"}],"name":"transferOwnership","outputs":[],"payable":false,"type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"to","type":"address"},{"indexed":false,"name":"value","type":"uint256"}],"name":"Mint","type":"event"},{"anonymous":false,"inputs":[],"name":"MintFinished","type":"event"},{"anonymous":false,"inputs":[],"name":"Pause","type":"event"},{"anonymous":false,"inputs":[],"name":"
Unpause","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"owner","type":"address"},{"indexed":true,"name":"spender","type":"address"},{"indexed":false,"name":"value","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"from","type":"address"},{"indexed":true,"name":"to","type":"address"},{"indexed":false,"name":"value","type":"uint256"}],"name":"Transfer","type":"event"}] \ No newline at end of file diff --git a/pkg/eth/testing/valid_abi.json b/pkg/eth/testing/valid_abi.json deleted file mode 100644 index 5a4cf7e4..00000000 --- a/pkg/eth/testing/valid_abi.json +++ /dev/null @@ -1 +0,0 @@ -[{"foo": "bar"}] \ No newline at end of file diff --git a/pkg/super_node/eth/types.go b/pkg/eth/types.go similarity index 98% rename from pkg/super_node/eth/types.go rename to pkg/eth/types.go index 79c43a7d..7ba18825 100644 --- a/pkg/super_node/eth/types.go +++ b/pkg/eth/types.go @@ -19,12 +19,11 @@ package eth import ( "math/big" - "github.com/ethereum/go-ethereum/statediff" - - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core/types" + "github.com/ethereum/go-ethereum/statediff" + + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" ) // ConvertedPayload is a custom type which packages raw ETH data for publishing to IPFS and filtering to subscribers diff --git a/pkg/fs/reader.go b/pkg/fs/reader.go deleted file mode 100644 index 05326ae7..00000000 --- a/pkg/fs/reader.go +++ /dev/null @@ -1,29 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package fs - -import "io/ioutil" - -type Reader interface { - Read(path string) ([]byte, error) -} - -type FsReader struct{} - -func (FsReader) Read(path string) ([]byte, error) { - return ioutil.ReadFile(path) -} diff --git a/pkg/fs/tail.go b/pkg/fs/tail.go deleted file mode 100644 index 7c84c692..00000000 --- a/pkg/fs/tail.go +++ /dev/null @@ -1,15 +0,0 @@ -package fs - -import "github.com/hpcloud/tail" - -type Tailer interface { - Tail() (*tail.Tail, error) -} - -type FileTailer struct { - Path string -} - -func (tailer FileTailer) Tail() (*tail.Tail, error) { - return tail.TailFile(tailer.Path, tail.Config{Follow: true}) -} diff --git a/pkg/ipfs/dag_putters/btc_header.go b/pkg/ipfs/dag_putters/btc_header.go index 36aeba42..50628422 100644 --- a/pkg/ipfs/dag_putters/btc_header.go +++ b/pkg/ipfs/dag_putters/btc_header.go @@ -22,8 +22,8 @@ import ( node "github.com/ipfs/go-ipld-format" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" ) var ( diff --git a/pkg/ipfs/dag_putters/btc_tx.go b/pkg/ipfs/dag_putters/btc_tx.go index 52cc7bdd..24c5fd68 100644 --- a/pkg/ipfs/dag_putters/btc_tx.go +++ b/pkg/ipfs/dag_putters/btc_tx.go @@ -22,8 +22,8 @@ import ( node "github.com/ipfs/go-ipld-format" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" ) type BtcTxDagPutter struct { diff --git 
a/pkg/ipfs/dag_putters/btc_tx_trie.go b/pkg/ipfs/dag_putters/btc_tx_trie.go index 039e2d98..a5693488 100644 --- a/pkg/ipfs/dag_putters/btc_tx_trie.go +++ b/pkg/ipfs/dag_putters/btc_tx_trie.go @@ -22,8 +22,8 @@ import ( node "github.com/ipfs/go-ipld-format" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" ) type BtcTxTrieDagPutter struct { diff --git a/pkg/ipfs/dag_putters/eth_header.go b/pkg/ipfs/dag_putters/eth_header.go index 630db6b3..85476ad8 100644 --- a/pkg/ipfs/dag_putters/eth_header.go +++ b/pkg/ipfs/dag_putters/eth_header.go @@ -22,8 +22,8 @@ import ( node "github.com/ipfs/go-ipld-format" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" ) type EthHeaderDagPutter struct { diff --git a/pkg/ipfs/dag_putters/eth_receipt.go b/pkg/ipfs/dag_putters/eth_receipt.go index c099f8c8..2371905b 100644 --- a/pkg/ipfs/dag_putters/eth_receipt.go +++ b/pkg/ipfs/dag_putters/eth_receipt.go @@ -22,8 +22,8 @@ import ( node "github.com/ipfs/go-ipld-format" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" ) type EthReceiptDagPutter struct { diff --git a/pkg/ipfs/dag_putters/eth_receipt_trie.go b/pkg/ipfs/dag_putters/eth_receipt_trie.go index 6d982b3a..2f84d233 100644 --- a/pkg/ipfs/dag_putters/eth_receipt_trie.go +++ b/pkg/ipfs/dag_putters/eth_receipt_trie.go @@ -22,8 +22,8 @@ import ( node "github.com/ipfs/go-ipld-format" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + 
"github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" ) type EthRctTrieDagPutter struct { diff --git a/pkg/ipfs/dag_putters/eth_state.go b/pkg/ipfs/dag_putters/eth_state.go index 9bfdbe44..26f39f21 100644 --- a/pkg/ipfs/dag_putters/eth_state.go +++ b/pkg/ipfs/dag_putters/eth_state.go @@ -22,8 +22,8 @@ import ( node "github.com/ipfs/go-ipld-format" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" ) type EthStateDagPutter struct { diff --git a/pkg/ipfs/dag_putters/eth_storage.go b/pkg/ipfs/dag_putters/eth_storage.go index 828de1f9..4cc1bfc3 100644 --- a/pkg/ipfs/dag_putters/eth_storage.go +++ b/pkg/ipfs/dag_putters/eth_storage.go @@ -22,8 +22,8 @@ import ( node "github.com/ipfs/go-ipld-format" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" ) type EthStorageDagPutter struct { diff --git a/pkg/ipfs/dag_putters/eth_tx.go b/pkg/ipfs/dag_putters/eth_tx.go index 906e1abf..5800a5e4 100644 --- a/pkg/ipfs/dag_putters/eth_tx.go +++ b/pkg/ipfs/dag_putters/eth_tx.go @@ -22,8 +22,8 @@ import ( node "github.com/ipfs/go-ipld-format" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" ) type EthTxsDagPutter struct { diff --git a/pkg/ipfs/dag_putters/eth_tx_trie.go b/pkg/ipfs/dag_putters/eth_tx_trie.go index f29478a2..4a6ec004 100644 --- a/pkg/ipfs/dag_putters/eth_tx_trie.go +++ b/pkg/ipfs/dag_putters/eth_tx_trie.go @@ -22,8 +22,8 @@ import ( node "github.com/ipfs/go-ipld-format" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" + 
"github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" ) type EthTxTrieDagPutter struct { diff --git a/pkg/plugin/builder/builder.go b/pkg/plugin/builder/builder.go deleted file mode 100644 index d045dff5..00000000 --- a/pkg/plugin/builder/builder.go +++ /dev/null @@ -1,131 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package builder - -import ( - "fmt" - "os" - "os/exec" - "path/filepath" - - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/plugin/helpers" -) - -// Interface for compile Go code written by the -// PluginWriter into a shared object (.so file) -// which can be used loaded as a plugin -type PluginBuilder interface { - BuildPlugin() error - CleanUp() error -} - -type builder struct { - GenConfig config.Plugin - dependencies []string - tmpVenDirs []string // Keep track of temp vendor directories - goFile string // Keep track of goFile name -} - -// Requires populated plugin config -func NewPluginBuilder(gc config.Plugin) PluginBuilder { - return &builder{ - GenConfig: gc, - tmpVenDirs: make([]string, 0), - } -} - -func (b *builder) BuildPlugin() error { - // Get plugin .go and .so file paths - var err error - var soFile string - b.goFile, soFile, err = b.GenConfig.GetPluginPaths() - if err != nil { - return err - } - - // setup env to build plugin - setupErr := b.setupBuildEnv() - if setupErr != nil { - return setupErr - } - - // Build the .go file into a .so plugin - execErr := exec.Command("go", "build", "-buildmode=plugin", "-o", soFile, b.goFile).Run() - if execErr != nil { - return fmt.Errorf("unable to build .so file: %s", execErr.Error()) - } - return nil -} - -// Sets up temporary vendor libs needed for plugin build -// This is to work around a conflict between plugins and vendoring (https://github.com/golang/go/issues/20481) -func (b *builder) setupBuildEnv() error { - // TODO: Less hacky way of handling plugin build deps - vendorPath, err := helpers.CleanPath(filepath.Join("$GOPATH/src", b.GenConfig.Home, "vendor")) - if err != nil { - return err - } - - repoPaths := b.GenConfig.GetRepoPaths() - - // Import transformer dependencies so that we can build our plugin - for importPath := range repoPaths { - dst := filepath.Join(vendorPath, importPath) - src, cleanErr := helpers.CleanPath(filepath.Join("$GOPATH/src", 
importPath)) - if cleanErr != nil { - return cleanErr - } - - copyErr := helpers.CopyDir(src, dst, "vendor") - if copyErr != nil { - return fmt.Errorf("unable to copy transformer dependency from %s to %s: %v", src, dst, copyErr) - } - - // Have to clear out the copied over vendor lib or plugin won't build (see issue above) - removeErr := os.RemoveAll(filepath.Join(dst, "vendor")) - if removeErr != nil { - return removeErr - } - // Keep track of this vendor directory to clear later - b.tmpVenDirs = append(b.tmpVenDirs, dst) - } - - return nil -} - -// Used to clear all of the tmp vendor libs used to build the plugin -// Also clears the go file if saving it has not been specified in the config -// Do not call until after the MigrationManager has performed its operations -// as it needs to pull the db migrations from the tmpVenDirs -func (b *builder) CleanUp() error { - if !b.GenConfig.Save { - err := helpers.ClearFiles(b.goFile) - if err != nil { - return err - } - } - - for _, venDir := range b.tmpVenDirs { - err := os.RemoveAll(venDir) - if err != nil { - return err - } - } - - return nil -} diff --git a/pkg/plugin/generator.go b/pkg/plugin/generator.go deleted file mode 100644 index 193ba7bc..00000000 --- a/pkg/plugin/generator.go +++ /dev/null @@ -1,69 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package plugin - -import ( - "errors" - - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/plugin/builder" - "github.com/vulcanize/vulcanizedb/pkg/plugin/manager" - "github.com/vulcanize/vulcanizedb/pkg/plugin/writer" -) - -// Generator is the top-level interface for creating transformer plugins -type Generator interface { - GenerateExporterPlugin() error -} - -type generator struct { - writer.PluginWriter - builder.PluginBuilder - manager.MigrationManager -} - -// Creates a new generator from a plugin and database config -func NewGenerator(gc config.Plugin, dbc config.Database) (Generator, error) { - if len(gc.Transformers) < 1 { - return nil, errors.New("plugin generator is not configured with any transformers") - } - return &generator{ - PluginWriter: writer.NewPluginWriter(gc), - PluginBuilder: builder.NewPluginBuilder(gc), - MigrationManager: manager.NewMigrationManager(gc, dbc), - }, nil -} - -// Generates plugin for the transformer initializers specified in the generator config -// Writes plugin code => Sets up build environment => Builds .so file => Performs db migrations for the plugin transformers => Clean up -func (g *generator) GenerateExporterPlugin() error { - // Use plugin writer interface to write the plugin code - err := g.PluginWriter.WritePlugin() - if err != nil { - return err - } - // Clean up temporary files and directories when we are done - defer g.PluginBuilder.CleanUp() - // Use plugin builder interface to setup build environment and compile .go file into a .so file - err = g.PluginBuilder.BuildPlugin() - if err != nil { - return err - } - - // Perform db migrations for the transformers - return g.MigrationManager.RunMigrations() -} diff --git a/pkg/plugin/helpers/helpers.go b/pkg/plugin/helpers/helpers.go deleted file mode 100644 index a3904d3c..00000000 --- a/pkg/plugin/helpers/helpers.go +++ /dev/null @@ -1,116 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free 
software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package helpers - -import ( - "io" - "io/ioutil" - "os" - "path/filepath" - "strings" - "syscall" - - "github.com/mitchellh/go-homedir" -) - -func CleanPath(str string) (string, error) { - path, err := homedir.Expand(filepath.Clean(str)) - if err != nil { - return "", err - } - if strings.Contains(path, "$GOPATH") { - env := os.Getenv("GOPATH") - spl := strings.Split(path, "$GOPATH")[1] - path = filepath.Join(env, spl) - } - - return path, nil -} - -func ClearFiles(files ...string) error { - for _, file := range files { - if _, err := os.Stat(file); err == nil { - err = os.Remove(file) - if err != nil { - return err - } - } else if os.IsNotExist(err) { - continue - } else { - return err - } - } - - return nil -} - -func CopyFile(src, dst string) error { - in, err := os.Open(src) - if err != nil { - return err - } - out, err := os.OpenFile(dst, syscall.O_CREAT|syscall.O_EXCL|os.O_WRONLY, os.FileMode(0666)) // Doesn't overwrite files - if err != nil { - in.Close() - return err - } - - _, err = io.Copy(out, in) - in.Close() - out.Close() - return err -} - -func CopyDir(src string, dst string, excludeRecursiveDir string) error { - var err error - var fds []os.FileInfo - var srcinfo os.FileInfo - - srcinfo, err = os.Stat(src) - if err != nil { - return err - } - - mkErr := os.MkdirAll(dst, srcinfo.Mode()) - if mkErr != nil { - return mkErr - } - - fds, readErr 
:= ioutil.ReadDir(src) - if err != readErr { - return readErr - } - for _, fd := range fds { - srcfp := filepath.Join(src, fd.Name()) - dstfp := filepath.Join(dst, fd.Name()) - - if fd.IsDir() { - if fd.Name() != excludeRecursiveDir { - dirErr := CopyDir(srcfp, dstfp, "") - if dirErr != nil { - return dirErr - } - } - } else { - fileErr := CopyFile(srcfp, dstfp) - if fileErr != nil { - return fileErr - } - } - } - return nil -} diff --git a/pkg/plugin/manager/manager.go b/pkg/plugin/manager/manager.go deleted file mode 100644 index 4db86477..00000000 --- a/pkg/plugin/manager/manager.go +++ /dev/null @@ -1,167 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package manager - -import ( - "database/sql" - "fmt" - "io/ioutil" - "os" - "path/filepath" - - "github.com/lib/pq" - "github.com/pressly/goose" - - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/plugin/helpers" -) - -// Interface for managing the db migrations for plugin transformers -type MigrationManager interface { - RunMigrations() error -} - -type manager struct { - GenConfig config.Plugin - DBConfig config.Database - tmpMigDir string - db *sql.DB -} - -// Manager requires both filled in generator and database configs -func NewMigrationManager(gc config.Plugin, dbc config.Database) MigrationManager { - return &manager{ - GenConfig: gc, - DBConfig: dbc, - } -} - -func (m *manager) setDB() error { - var pgStr string - if len(m.DBConfig.User) > 0 && len(m.DBConfig.Password) > 0 { - pgStr = fmt.Sprintf("postgresql://%s:%s@%s:%d/%s?sslmode=disable", - m.DBConfig.User, m.DBConfig.Password, m.DBConfig.Hostname, m.DBConfig.Port, m.DBConfig.Name) - } else { - pgStr = fmt.Sprintf("postgres://%s:%d/%s?sslmode=disable", m.DBConfig.Hostname, m.DBConfig.Port, m.DBConfig.Name) - } - dbConnector, err := pq.NewConnector(pgStr) - if err != nil { - return fmt.Errorf("can't connect to db: %s", err.Error()) - } - m.db = sql.OpenDB(dbConnector) - return nil -} - -func (m *manager) RunMigrations() error { - // Get paths to db migrations from the plugin config - paths, err := m.GenConfig.GetMigrationsPaths() - if err != nil { - return err - } - if len(paths) < 1 { - return nil - } - // Init directory for temporary copies of migrations - err = m.setupMigrationEnv() - if err != nil { - return err - } - defer m.cleanUp() - // Creates copies of migrations for all the plugin's transformers in a tmp dir - err = m.createMigrationCopies(paths) - if err != nil { - return err - } - - return nil -} - -// Setup a temporary directory to hold transformer db migrations -func (m *manager) setupMigrationEnv() error { - var err error - m.tmpMigDir, err = 
helpers.CleanPath(filepath.Join("$GOPATH/src", m.GenConfig.Home, ".plugin_migrations")) - if err != nil { - return err - } - removeErr := os.RemoveAll(m.tmpMigDir) - if removeErr != nil { - removeErrString := "unable to remove file found at %s where tmp directory needs to be written: %s" - return fmt.Errorf(removeErrString, m.tmpMigDir, removeErr.Error()) - } - mkdirErr := os.Mkdir(m.tmpMigDir, os.FileMode(os.ModePerm)) - if mkdirErr != nil { - mkdirErrString := "unable to create temporary migration directory %s: %s" - return fmt.Errorf(mkdirErrString, m.tmpMigDir, mkdirErr.Error()) - } - - return nil -} - -// Create copies of db migrations from vendored libs -func (m *manager) createMigrationCopies(paths []string) error { - // Iterate through migration paths to find migration directory - for _, path := range paths { - dir, err := ioutil.ReadDir(path) - if err != nil { - return err - } - // For each file in the directory check if it is a migration - for _, file := range dir { - if file.IsDir() || filepath.Ext(file.Name()) != ".sql" { - continue - } - src := filepath.Join(path, file.Name()) - dst := filepath.Join(m.tmpMigDir, file.Name()) - // and if it is make a copy of it to our tmp migration directory - err = helpers.CopyFile(src, dst) - if err != nil { - return err - } - } - err = m.fixAndRun(path) - if err != nil { - return err - } - } - - return nil -} - -func (m *manager) fixAndRun(path string) error { - // Setup DB if not set - if m.db == nil { - setErr := m.setDB() - if setErr != nil { - return fmt.Errorf("could not open db: %s", setErr.Error()) - } - } - // Fix the migrations - fixErr := goose.Fix(m.tmpMigDir) - if fixErr != nil { - return fmt.Errorf("version fixing for plugin migrations at %s failed: %s", path, fixErr.Error()) - } - // Run the copied migrations with goose - upErr := goose.Up(m.db, m.tmpMigDir) - if upErr != nil { - return fmt.Errorf("db migrations for plugin transformers at %s failed: %s", path, upErr.Error()) - } - return nil -} - -func 
(m *manager) cleanUp() error { - return os.RemoveAll(m.tmpMigDir) -} diff --git a/pkg/plugin/writer/writer.go b/pkg/plugin/writer/writer.go deleted file mode 100644 index e56a477d..00000000 --- a/pkg/plugin/writer/writer.go +++ /dev/null @@ -1,124 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package writer - -import ( - "fmt" - - . "github.com/dave/jennifer/jen" - - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/plugin/helpers" -) - -// Interface for writing a .go file for a simple -// plugin that exports the set of transformer -// initializers specified in the config -type PluginWriter interface { - WritePlugin() error -} - -type writer struct { - GenConfig config.Plugin -} - -// Requires populated plugin config -func NewPluginWriter(gc config.Plugin) PluginWriter { - return &writer{ - GenConfig: gc, - } -} - -// Generates the plugin code according to config specification -func (w *writer) WritePlugin() error { - // Setup plugin file paths - goFile, err := w.setupFilePath() - if err != nil { - return err - } - - // Begin code generation - f := NewFile("main") - f.HeaderComment("This is a plugin generated to export the configured transformer initializers") - - // Import pkgs for generic TransformerInitializer interface and specific TransformerInitializers specified in config - 
f.ImportAlias("github.com/vulcanize/vulcanizedb/libraries/shared/transformer", "interface") - for name, transformer := range w.GenConfig.Transformers { - f.ImportAlias(transformer.RepositoryPath+"/"+transformer.Path, name) - } - - // Collect initializer code - code, err := w.collectTransformers() - if err != nil { - return err - } - - // Create Exporter variable with method to export the set of the imported storage and event transformer initializers - f.Type().Id("exporter").String() - f.Var().Id("Exporter").Id("exporter") - f.Func().Params(Id("e").Id("exporter")).Id("Export").Params().Parens(List( - Index().Qual("github.com/vulcanize/vulcanizedb/libraries/shared/transformer", "EventTransformerInitializer"), - Index().Qual("github.com/vulcanize/vulcanizedb/libraries/shared/transformer", "StorageTransformerInitializer"), - Index().Qual("github.com/vulcanize/vulcanizedb/libraries/shared/transformer", "ContractTransformerInitializer"), - )).Block(Return( - Index().Qual( - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer", - "EventTransformerInitializer").Values(code[config.EthEvent]...), - Index().Qual( - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer", - "StorageTransformerInitializer").Values(code[config.EthStorage]...), - Index().Qual( - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer", - "ContractTransformerInitializer").Values(code[config.EthContract]...))) // Exports the collected event and storage transformer initializers - - // Write code to destination file - err = f.Save(goFile) - if err != nil { - return fmt.Errorf("failed to save generated .go file: %s\r\n%s", goFile, err.Error()) - } - return nil -} - -// Collect code for various types of initializers -func (w *writer) collectTransformers() (map[config.TransformerType][]Code, error) { - code := make(map[config.TransformerType][]Code) - for _, transformer := range w.GenConfig.Transformers { - path := transformer.RepositoryPath + "/" + transformer.Path - switch 
transformer.Type { - case config.EthEvent: - code[config.EthEvent] = append(code[config.EthEvent], Qual(path, "EventTransformerInitializer")) - case config.EthStorage: - code[config.EthStorage] = append(code[config.EthStorage], Qual(path, "StorageTransformerInitializer")) - case config.EthContract: - code[config.EthContract] = append(code[config.EthContract], Qual(path, "ContractTransformerInitializer")) - default: - return nil, fmt.Errorf("invalid transformer type %s", transformer.Type) - } - } - - return code, nil -} - -// Setup the .go, clear old ones if present -func (w *writer) setupFilePath() (string, error) { - goFile, soFile, err := w.GenConfig.GetPluginPaths() - if err != nil { - return "", err - } - // Clear .go and .so files of the same name if they exist - return goFile, helpers.ClearFiles(goFile, soFile) -} diff --git a/pkg/postgres/postgres.go b/pkg/postgres/postgres.go index 812cbfd5..07e9f9cd 100644 --- a/pkg/postgres/postgres.go +++ b/pkg/postgres/postgres.go @@ -21,8 +21,8 @@ import ( "github.com/jmoiron/sqlx" _ "github.com/lib/pq" //postgres driver - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" + "github.com/vulcanize/ipfs-chain-watcher/pkg/config" + "github.com/vulcanize/ipfs-chain-watcher/pkg/core" ) type DB struct { diff --git a/pkg/postgres/postgres_test.go b/pkg/postgres/postgres_test.go index b927c1ed..1517843d 100644 --- a/pkg/postgres/postgres_test.go +++ b/pkg/postgres/postgres_test.go @@ -26,10 +26,10 @@ import ( _ "github.com/lib/pq" . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/test_config" + "github.com/vulcanize/ipfs-chain-watcher/pkg/config" + "github.com/vulcanize/ipfs-chain-watcher/pkg/core" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/test_config" ) var _ = Describe("Postgres DB", func() { diff --git a/pkg/super_node/resync/config.go b/pkg/resync/config.go similarity index 94% rename from pkg/super_node/resync/config.go rename to pkg/resync/config.go index 22b77b83..7a34b749 100644 --- a/pkg/super_node/resync/config.go +++ b/pkg/resync/config.go @@ -22,11 +22,11 @@ import ( "github.com/spf13/viper" - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" - "github.com/vulcanize/vulcanizedb/utils" + "github.com/vulcanize/ipfs-chain-watcher/pkg/config" + "github.com/vulcanize/ipfs-chain-watcher/pkg/core" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" + "github.com/vulcanize/ipfs-chain-watcher/utils" ) // Env variables diff --git a/pkg/super_node/resync/service.go b/pkg/resync/service.go similarity index 87% rename from pkg/super_node/resync/service.go rename to pkg/resync/service.go index 4a613a20..b2315ec2 100644 --- a/pkg/super_node/resync/service.go +++ b/pkg/resync/service.go @@ -18,11 +18,12 @@ package resync import ( "fmt" + "github.com/sirupsen/logrus" - utils "github.com/vulcanize/vulcanizedb/libraries/shared/utilities" - "github.com/vulcanize/vulcanizedb/pkg/super_node" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/watcher" + 
"github.com/vulcanize/ipfs-chain-watcher/utils" ) type Resync interface { @@ -62,37 +63,37 @@ type Service struct { // NewResyncService creates and returns a resync service from the provided settings func NewResyncService(settings *Config) (Resync, error) { - publisher, err := super_node.NewIPLDPublisher(settings.Chain, settings.IPFSPath, settings.DB, settings.IPFSMode) + publisher, err := watcher.NewIPLDPublisher(settings.Chain, settings.IPFSPath, settings.DB, settings.IPFSMode) if err != nil { return nil, err } - indexer, err := super_node.NewCIDIndexer(settings.Chain, settings.DB, settings.IPFSMode) + indexer, err := watcher.NewCIDIndexer(settings.Chain, settings.DB, settings.IPFSMode) if err != nil { return nil, err } - converter, err := super_node.NewPayloadConverter(settings.Chain) + converter, err := watcher.NewPayloadConverter(settings.Chain) if err != nil { return nil, err } - retriever, err := super_node.NewCIDRetriever(settings.Chain, settings.DB) + retriever, err := watcher.NewCIDRetriever(settings.Chain, settings.DB) if err != nil { return nil, err } - fetcher, err := super_node.NewPaylaodFetcher(settings.Chain, settings.HTTPClient, settings.Timeout) + fetcher, err := watcher.NewPaylaodFetcher(settings.Chain, settings.HTTPClient, settings.Timeout) if err != nil { return nil, err } - cleaner, err := super_node.NewCleaner(settings.Chain, settings.DB) + cleaner, err := watcher.NewCleaner(settings.Chain, settings.DB) if err != nil { return nil, err } batchSize := settings.BatchSize if batchSize == 0 { - batchSize = super_node.DefaultMaxBatchSize + batchSize = watcher.DefaultMaxBatchSize } batchNumber := int64(settings.BatchNumber) if batchNumber == 0 { - batchNumber = super_node.DefaultMaxBatchNumber + batchNumber = watcher.DefaultMaxBatchNumber } return &Service{ Indexer: indexer, diff --git a/pkg/super_node/shared/chain_type.go b/pkg/shared/chain_type.go similarity index 100% rename from pkg/super_node/shared/chain_type.go rename to 
pkg/shared/chain_type.go diff --git a/pkg/super_node/shared/data_type.go b/pkg/shared/data_type.go similarity index 100% rename from pkg/super_node/shared/data_type.go rename to pkg/shared/data_type.go diff --git a/pkg/super_node/shared/env.go b/pkg/shared/env.go similarity index 98% rename from pkg/super_node/shared/env.go rename to pkg/shared/env.go index b49a54c3..a50aad13 100644 --- a/pkg/super_node/shared/env.go +++ b/pkg/shared/env.go @@ -24,7 +24,7 @@ import ( "github.com/btcsuite/btcd/rpcclient" "github.com/spf13/viper" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" + "github.com/vulcanize/ipfs-chain-watcher/pkg/core" ) // Env variables diff --git a/pkg/super_node/shared/functions.go b/pkg/shared/functions.go similarity index 97% rename from pkg/super_node/shared/functions.go rename to pkg/shared/functions.go index d36231e0..bf08c8c5 100644 --- a/pkg/super_node/shared/functions.go +++ b/pkg/shared/functions.go @@ -19,7 +19,7 @@ package shared import ( "bytes" - "github.com/vulcanize/vulcanizedb/pkg/ipfs/ipld" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs/ipld" "github.com/ipfs/go-cid" @@ -30,7 +30,7 @@ import ( "github.com/jmoiron/sqlx" "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/ipfs" + "github.com/vulcanize/ipfs-chain-watcher/pkg/ipfs" ) // ListContainsString used to check if a list of strings contains a particular string diff --git a/pkg/super_node/shared/intefaces.go b/pkg/shared/intefaces.go similarity index 100% rename from pkg/super_node/shared/intefaces.go rename to pkg/shared/intefaces.go diff --git a/pkg/super_node/shared/ipfs_mode.go b/pkg/shared/ipfs_mode.go similarity index 100% rename from pkg/super_node/shared/ipfs_mode.go rename to pkg/shared/ipfs_mode.go diff --git a/pkg/super_node/shared/mocks/payload_fetcher.go b/pkg/shared/mocks/payload_fetcher.go similarity index 96% rename from pkg/super_node/shared/mocks/payload_fetcher.go rename to pkg/shared/mocks/payload_fetcher.go index 589a50ee..d576c12e 100644 
--- a/pkg/super_node/shared/mocks/payload_fetcher.go +++ b/pkg/shared/mocks/payload_fetcher.go @@ -20,7 +20,7 @@ import ( "errors" "sync/atomic" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // PayloadFetcher mock for tests diff --git a/pkg/super_node/shared/mocks/retriever.go b/pkg/shared/mocks/retriever.go similarity index 94% rename from pkg/super_node/shared/mocks/retriever.go rename to pkg/shared/mocks/retriever.go index d899d0b2..16e65b71 100644 --- a/pkg/super_node/shared/mocks/retriever.go +++ b/pkg/shared/mocks/retriever.go @@ -17,8 +17,8 @@ package mocks import ( - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // CIDRetriever is a mock CID retriever for use in tests diff --git a/pkg/super_node/shared/mocks/streamer.go b/pkg/shared/mocks/streamer.go similarity index 95% rename from pkg/super_node/shared/mocks/streamer.go rename to pkg/shared/mocks/streamer.go index 6b9d2774..dddd45d0 100644 --- a/pkg/super_node/shared/mocks/streamer.go +++ b/pkg/shared/mocks/streamer.go @@ -18,7 +18,7 @@ package mocks import ( "github.com/ethereum/go-ethereum/rpc" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) // PayloadStreamer mock struct diff --git a/pkg/super_node/shared/test_helpers.go b/pkg/shared/test_helpers.go similarity index 85% rename from pkg/super_node/shared/test_helpers.go rename to pkg/shared/test_helpers.go index 81cc6e3b..a18fe421 100644 --- a/pkg/super_node/shared/test_helpers.go +++ b/pkg/shared/test_helpers.go @@ -17,9 +17,9 @@ package shared import ( - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" + 
"github.com/vulcanize/ipfs-chain-watcher/pkg/config" + "github.com/vulcanize/ipfs-chain-watcher/pkg/core" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" ) // SetupDB is use to setup a db for super node tests diff --git a/pkg/super_node/shared/types.go b/pkg/shared/types.go similarity index 100% rename from pkg/super_node/shared/types.go rename to pkg/shared/types.go diff --git a/libraries/shared/streamer/super_node_streamer.go b/pkg/streamer/super_node_streamer.go similarity index 85% rename from libraries/shared/streamer/super_node_streamer.go rename to pkg/streamer/super_node_streamer.go index e57f2b44..aa0a36ab 100644 --- a/libraries/shared/streamer/super_node_streamer.go +++ b/pkg/streamer/super_node_streamer.go @@ -20,8 +20,8 @@ package streamer import ( "github.com/ethereum/go-ethereum/rpc" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/super_node" + "github.com/vulcanize/ipfs-chain-watcher/pkg/core" + "github.com/vulcanize/ipfs-chain-watcher/pkg/watcher" ) // SuperNodeStreamer is the underlying struct for the shared.SuperNodeStreamer interface @@ -37,6 +37,6 @@ func NewSuperNodeStreamer(client core.RPCClient) *SuperNodeStreamer { } // Stream is the main loop for subscribing to data from a vulcanizedb super node -func (sds *SuperNodeStreamer) Stream(payloadChan chan super_node.SubscriptionPayload, rlpParams []byte) (*rpc.ClientSubscription, error) { +func (sds *SuperNodeStreamer) Stream(payloadChan chan watcher.SubscriptionPayload, rlpParams []byte) (*rpc.ClientSubscription, error) { return sds.Client.Subscribe("vdb", payloadChan, "stream", rlpParams) } diff --git a/pkg/super_node/config.go b/pkg/super_node/config.go deleted file mode 100644 index ba582160..00000000 --- a/pkg/super_node/config.go +++ /dev/null @@ -1,269 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public 
License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package super_node - -import ( - "fmt" - "os" - "path/filepath" - "time" - - "github.com/spf13/viper" - - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" - "github.com/vulcanize/vulcanizedb/utils" -) - -// Env variables -const ( - SUPERNODE_CHAIN = "SUPERNODE_CHAIN" - SUPERNODE_SYNC = "SUPERNODE_SYNC" - SUPERNODE_WORKERS = "SUPERNODE_WORKERS" - SUPERNODE_SERVER = "SUPERNODE_SERVER" - SUPERNODE_WS_PATH = "SUPERNODE_WS_PATH" - SUPERNODE_IPC_PATH = "SUPERNODE_IPC_PATH" - SUPERNODE_HTTP_PATH = "SUPERNODE_HTTP_PATH" - SUPERNODE_BACKFILL = "SUPERNODE_BACKFILL" - SUPERNODE_FREQUENCY = "SUPERNODE_FREQUENCY" - SUPERNODE_BATCH_SIZE = "SUPERNODE_BATCH_SIZE" - SUPERNODE_BATCH_NUMBER = "SUPERNODE_BATCH_NUMBER" - SUPERNODE_VALIDATION_LEVEL = "SUPERNODE_VALIDATION_LEVEL" - - SYNC_MAX_IDLE_CONNECTIONS = "SYNC_MAX_IDLE_CONNECTIONS" - SYNC_MAX_OPEN_CONNECTIONS = "SYNC_MAX_OPEN_CONNECTIONS" - SYNC_MAX_CONN_LIFETIME = "SYNC_MAX_CONN_LIFETIME" - - BACKFILL_MAX_IDLE_CONNECTIONS = "BACKFILL_MAX_IDLE_CONNECTIONS" - BACKFILL_MAX_OPEN_CONNECTIONS = "BACKFILL_MAX_OPEN_CONNECTIONS" - BACKFILL_MAX_CONN_LIFETIME = "BACKFILL_MAX_CONN_LIFETIME" - - SERVER_MAX_IDLE_CONNECTIONS = "SERVER_MAX_IDLE_CONNECTIONS" - SERVER_MAX_OPEN_CONNECTIONS = "SERVER_MAX_OPEN_CONNECTIONS" - SERVER_MAX_CONN_LIFETIME = "SERVER_MAX_CONN_LIFETIME" -) - -// 
Config struct -type Config struct { - // Ubiquitous fields - Chain shared.ChainType - IPFSPath string - IPFSMode shared.IPFSMode - DBConfig config.Database - // Server fields - Serve bool - ServeDBConn *postgres.DB - WSEndpoint string - HTTPEndpoint string - IPCEndpoint string - // Sync params - Sync bool - SyncDBConn *postgres.DB - Workers int - WSClient interface{} - NodeInfo core.Node - // Backfiller params - BackFill bool - BackFillDBConn *postgres.DB - HTTPClient interface{} - Frequency time.Duration - BatchSize uint64 - BatchNumber uint64 - ValidationLevel int - Timeout time.Duration // HTTP connection timeout in seconds -} - -// NewSuperNodeConfig is used to initialize a SuperNode config from a .toml file -// Separate chain supernode instances need to be ran with separate ipfs path in order to avoid lock contention on the ipfs repository lockfile -func NewSuperNodeConfig() (*Config, error) { - c := new(Config) - var err error - - viper.BindEnv("superNode.chain", SUPERNODE_CHAIN) - viper.BindEnv("superNode.sync", SUPERNODE_SYNC) - viper.BindEnv("superNode.workers", SUPERNODE_WORKERS) - viper.BindEnv("ethereum.wsPath", shared.ETH_WS_PATH) - viper.BindEnv("bitcoin.wsPath", shared.BTC_WS_PATH) - viper.BindEnv("superNode.server", SUPERNODE_SERVER) - viper.BindEnv("superNode.wsPath", SUPERNODE_WS_PATH) - viper.BindEnv("superNode.ipcPath", SUPERNODE_IPC_PATH) - viper.BindEnv("superNode.httpPath", SUPERNODE_HTTP_PATH) - viper.BindEnv("superNode.backFill", SUPERNODE_BACKFILL) - - chain := viper.GetString("superNode.chain") - c.Chain, err = shared.NewChainType(chain) - if err != nil { - return nil, err - } - - c.IPFSMode, err = shared.GetIPFSMode() - if err != nil { - return nil, err - } - if c.IPFSMode == shared.LocalInterface || c.IPFSMode == shared.RemoteClient { - c.IPFSPath, err = shared.GetIPFSPath() - if err != nil { - return nil, err - } - } - - c.DBConfig.Init() - - c.Sync = viper.GetBool("superNode.sync") - if c.Sync { - workers := 
viper.GetInt("superNode.workers") - if workers < 1 { - workers = 1 - } - c.Workers = workers - switch c.Chain { - case shared.Ethereum: - ethWS := viper.GetString("ethereum.wsPath") - c.NodeInfo, c.WSClient, err = shared.GetEthNodeAndClient(fmt.Sprintf("ws://%s", ethWS)) - if err != nil { - return nil, err - } - case shared.Bitcoin: - btcWS := viper.GetString("bitcoin.wsPath") - c.NodeInfo, c.WSClient = shared.GetBtcNodeAndClient(btcWS) - } - syncDBConn := overrideDBConnConfig(c.DBConfig, Sync) - syncDB := utils.LoadPostgres(syncDBConn, c.NodeInfo) - c.SyncDBConn = &syncDB - } - - c.Serve = viper.GetBool("superNode.server") - if c.Serve { - wsPath := viper.GetString("superNode.wsPath") - if wsPath == "" { - wsPath = "127.0.0.1:8080" - } - c.WSEndpoint = wsPath - ipcPath := viper.GetString("superNode.ipcPath") - if ipcPath == "" { - home, err := os.UserHomeDir() - if err != nil { - return nil, err - } - ipcPath = filepath.Join(home, ".vulcanize/vulcanize.ipc") - } - c.IPCEndpoint = ipcPath - httpPath := viper.GetString("superNode.httpPath") - if httpPath == "" { - httpPath = "127.0.0.1:8081" - } - c.HTTPEndpoint = httpPath - serveDBConn := overrideDBConnConfig(c.DBConfig, Serve) - serveDB := utils.LoadPostgres(serveDBConn, c.NodeInfo) - c.ServeDBConn = &serveDB - } - - c.BackFill = viper.GetBool("superNode.backFill") - if c.BackFill { - if err := c.BackFillFields(); err != nil { - return nil, err - } - } - - return c, nil -} - -// BackFillFields is used to fill in the BackFill fields of the config -func (c *Config) BackFillFields() error { - var err error - - viper.BindEnv("ethereum.httpPath", shared.ETH_HTTP_PATH) - viper.BindEnv("bitcoin.httpPath", shared.BTC_HTTP_PATH) - viper.BindEnv("superNode.frequency", SUPERNODE_FREQUENCY) - viper.BindEnv("superNode.batchSize", SUPERNODE_BATCH_SIZE) - viper.BindEnv("superNode.batchNumber", SUPERNODE_BATCH_NUMBER) - viper.BindEnv("superNode.validationLevel", SUPERNODE_VALIDATION_LEVEL) - viper.BindEnv("superNode.timeout", 
shared.HTTP_TIMEOUT) - - timeout := viper.GetInt("superNode.timeout") - if timeout < 15 { - timeout = 15 - } - c.Timeout = time.Second * time.Duration(timeout) - - switch c.Chain { - case shared.Ethereum: - ethHTTP := viper.GetString("ethereum.httpPath") - c.NodeInfo, c.HTTPClient, err = shared.GetEthNodeAndClient(fmt.Sprintf("http://%s", ethHTTP)) - if err != nil { - return err - } - case shared.Bitcoin: - btcHTTP := viper.GetString("bitcoin.httpPath") - c.NodeInfo, c.HTTPClient = shared.GetBtcNodeAndClient(btcHTTP) - } - - freq := viper.GetInt("superNode.frequency") - var frequency time.Duration - if freq <= 0 { - frequency = time.Second * 30 - } else { - frequency = time.Second * time.Duration(freq) - } - c.Frequency = frequency - c.BatchSize = uint64(viper.GetInt64("superNode.batchSize")) - c.BatchNumber = uint64(viper.GetInt64("superNode.batchNumber")) - c.ValidationLevel = viper.GetInt("superNode.validationLevel") - - backFillDBConn := overrideDBConnConfig(c.DBConfig, BackFill) - backFillDB := utils.LoadPostgres(backFillDBConn, c.NodeInfo) - c.BackFillDBConn = &backFillDB - return nil -} - -type mode string - -var ( - Sync mode = "sync" - BackFill mode = "backFill" - Serve mode = "serve" -) - -func overrideDBConnConfig(con config.Database, m mode) config.Database { - switch m { - case Sync: - viper.BindEnv("database.sync.maxIdle", SYNC_MAX_IDLE_CONNECTIONS) - viper.BindEnv("database.sync.maxOpen", SYNC_MAX_OPEN_CONNECTIONS) - viper.BindEnv("database.sync.maxLifetime", SYNC_MAX_CONN_LIFETIME) - con.MaxIdle = viper.GetInt("database.sync.maxIdle") - con.MaxOpen = viper.GetInt("database.sync.maxOpen") - con.MaxLifetime = viper.GetInt("database.sync.maxLifetime") - case BackFill: - viper.BindEnv("database.backFill.maxIdle", BACKFILL_MAX_IDLE_CONNECTIONS) - viper.BindEnv("database.backFill.maxOpen", BACKFILL_MAX_OPEN_CONNECTIONS) - viper.BindEnv("database.backFill.maxLifetime", BACKFILL_MAX_CONN_LIFETIME) - con.MaxIdle = viper.GetInt("database.backFill.maxIdle") - 
con.MaxOpen = viper.GetInt("database.backFill.maxOpen") - con.MaxLifetime = viper.GetInt("database.backFill.maxLifetime") - case Serve: - viper.BindEnv("database.server.maxIdle", SERVER_MAX_IDLE_CONNECTIONS) - viper.BindEnv("database.server.maxOpen", SERVER_MAX_OPEN_CONNECTIONS) - viper.BindEnv("database.server.maxLifetime", SERVER_MAX_CONN_LIFETIME) - con.MaxIdle = viper.GetInt("database.server.maxIdle") - con.MaxOpen = viper.GetInt("database.server.maxOpen") - con.MaxLifetime = viper.GetInt("database.server.maxLifetime") - default: - } - return con -} diff --git a/pkg/super_node/constructors.go b/pkg/super_node/constructors.go deleted file mode 100644 index 283d3141..00000000 --- a/pkg/super_node/constructors.go +++ /dev/null @@ -1,219 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package super_node - -import ( - "fmt" - "time" - - "github.com/btcsuite/btcd/chaincfg" - "github.com/btcsuite/btcd/rpcclient" - "github.com/ethereum/go-ethereum/params" - "github.com/ethereum/go-ethereum/rpc" - - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/btc" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" -) - -// NewResponseFilterer constructs a ResponseFilterer for the provided chain type -func NewResponseFilterer(chain shared.ChainType) (shared.ResponseFilterer, error) { - switch chain { - case shared.Ethereum: - return eth.NewResponseFilterer(), nil - case shared.Bitcoin: - return btc.NewResponseFilterer(), nil - default: - return nil, fmt.Errorf("invalid chain %s for filterer constructor", chain.String()) - } -} - -// NewCIDIndexer constructs a CIDIndexer for the provided chain type -func NewCIDIndexer(chain shared.ChainType, db *postgres.DB, ipfsMode shared.IPFSMode) (shared.CIDIndexer, error) { - switch chain { - case shared.Ethereum: - switch ipfsMode { - case shared.LocalInterface, shared.RemoteClient: - return eth.NewCIDIndexer(db), nil - case shared.DirectPostgres: - return eth.NewIPLDPublisherAndIndexer(db), nil - default: - return nil, fmt.Errorf("ethereum CIDIndexer unexpected ipfs mode %s", ipfsMode.String()) - } - case shared.Bitcoin: - switch ipfsMode { - case shared.LocalInterface, shared.RemoteClient: - return btc.NewCIDIndexer(db), nil - case shared.DirectPostgres: - return eth.NewIPLDPublisherAndIndexer(db), nil - default: - return nil, fmt.Errorf("bitcoin CIDIndexer unexpected ipfs mode %s", ipfsMode.String()) - } - default: - return nil, fmt.Errorf("invalid chain %s for indexer constructor", chain.String()) - } -} - -// NewCIDRetriever constructs a CIDRetriever for the provided chain type -func NewCIDRetriever(chain shared.ChainType, db *postgres.DB) (shared.CIDRetriever, error) { - switch chain { - case 
shared.Ethereum: - return eth.NewCIDRetriever(db), nil - case shared.Bitcoin: - return btc.NewCIDRetriever(db), nil - default: - return nil, fmt.Errorf("invalid chain %s for retriever constructor", chain.String()) - } -} - -// NewPayloadStreamer constructs a PayloadStreamer for the provided chain type -func NewPayloadStreamer(chain shared.ChainType, clientOrConfig interface{}) (shared.PayloadStreamer, chan shared.RawChainData, error) { - switch chain { - case shared.Ethereum: - ethClient, ok := clientOrConfig.(*rpc.Client) - if !ok { - return nil, nil, fmt.Errorf("ethereum payload streamer constructor expected client type %T got %T", &rpc.Client{}, clientOrConfig) - } - streamChan := make(chan shared.RawChainData, eth.PayloadChanBufferSize) - return eth.NewPayloadStreamer(ethClient), streamChan, nil - case shared.Bitcoin: - btcClientConn, ok := clientOrConfig.(*rpcclient.ConnConfig) - if !ok { - return nil, nil, fmt.Errorf("bitcoin payload streamer constructor expected client config type %T got %T", rpcclient.ConnConfig{}, clientOrConfig) - } - streamChan := make(chan shared.RawChainData, btc.PayloadChanBufferSize) - return btc.NewHTTPPayloadStreamer(btcClientConn), streamChan, nil - default: - return nil, nil, fmt.Errorf("invalid chain %s for streamer constructor", chain.String()) - } -} - -// NewPaylaodFetcher constructs a PayloadFetcher for the provided chain type -func NewPaylaodFetcher(chain shared.ChainType, client interface{}, timeout time.Duration) (shared.PayloadFetcher, error) { - switch chain { - case shared.Ethereum: - batchClient, ok := client.(*rpc.Client) - if !ok { - return nil, fmt.Errorf("ethereum payload fetcher constructor expected client type %T got %T", &rpc.Client{}, client) - } - return eth.NewPayloadFetcher(batchClient, timeout), nil - case shared.Bitcoin: - connConfig, ok := client.(*rpcclient.ConnConfig) - if !ok { - return nil, fmt.Errorf("bitcoin payload fetcher constructor expected client type %T got %T", &rpcclient.Client{}, client) - 
} - return btc.NewPayloadFetcher(connConfig) - default: - return nil, fmt.Errorf("invalid chain %s for payload fetcher constructor", chain.String()) - } -} - -// NewPayloadConverter constructs a PayloadConverter for the provided chain type -func NewPayloadConverter(chain shared.ChainType) (shared.PayloadConverter, error) { - switch chain { - case shared.Ethereum: - return eth.NewPayloadConverter(params.MainnetChainConfig), nil - case shared.Bitcoin: - return btc.NewPayloadConverter(&chaincfg.MainNetParams), nil - default: - return nil, fmt.Errorf("invalid chain %s for converter constructor", chain.String()) - } -} - -// NewIPLDFetcher constructs an IPLDFetcher for the provided chain type -func NewIPLDFetcher(chain shared.ChainType, ipfsPath string, db *postgres.DB, ipfsMode shared.IPFSMode) (shared.IPLDFetcher, error) { - switch chain { - case shared.Ethereum: - switch ipfsMode { - case shared.LocalInterface, shared.RemoteClient: - return eth.NewIPLDFetcher(ipfsPath) - case shared.DirectPostgres: - return eth.NewIPLDPGFetcher(db), nil - default: - return nil, fmt.Errorf("ethereum IPLDFetcher unexpected ipfs mode %s", ipfsMode.String()) - } - case shared.Bitcoin: - switch ipfsMode { - case shared.LocalInterface, shared.RemoteClient: - return btc.NewIPLDFetcher(ipfsPath) - case shared.DirectPostgres: - return btc.NewIPLDPGFetcher(db), nil - default: - return nil, fmt.Errorf("bitcoin IPLDFetcher unexpected ipfs mode %s", ipfsMode.String()) - } - default: - return nil, fmt.Errorf("invalid chain %s for IPLD fetcher constructor", chain.String()) - } -} - -// NewIPLDPublisher constructs an IPLDPublisher for the provided chain type -func NewIPLDPublisher(chain shared.ChainType, ipfsPath string, db *postgres.DB, ipfsMode shared.IPFSMode) (shared.IPLDPublisher, error) { - switch chain { - case shared.Ethereum: - switch ipfsMode { - case shared.LocalInterface, shared.RemoteClient: - return eth.NewIPLDPublisher(ipfsPath) - case shared.DirectPostgres: - return 
eth.NewIPLDPublisherAndIndexer(db), nil - default: - return nil, fmt.Errorf("ethereum IPLDPublisher unexpected ipfs mode %s", ipfsMode.String()) - } - case shared.Bitcoin: - switch ipfsMode { - case shared.LocalInterface, shared.RemoteClient: - return btc.NewIPLDPublisher(ipfsPath) - case shared.DirectPostgres: - return btc.NewIPLDPublisherAndIndexer(db), nil - default: - return nil, fmt.Errorf("bitcoin IPLDPublisher unexpected ipfs mode %s", ipfsMode.String()) - } - default: - return nil, fmt.Errorf("invalid chain %s for publisher constructor", chain.String()) - } -} - -// NewPublicAPI constructs a PublicAPI for the provided chain type -func NewPublicAPI(chain shared.ChainType, db *postgres.DB, ipfsPath string) (rpc.API, error) { - switch chain { - case shared.Ethereum: - backend, err := eth.NewEthBackend(db) - if err != nil { - return rpc.API{}, err - } - return rpc.API{ - Namespace: eth.APIName, - Version: eth.APIVersion, - Service: eth.NewPublicEthAPI(backend), - Public: true, - }, nil - default: - return rpc.API{}, fmt.Errorf("invalid chain %s for public api constructor", chain.String()) - } -} - -// NewCleaner constructs a Cleaner for the provided chain type -func NewCleaner(chain shared.ChainType, db *postgres.DB) (shared.Cleaner, error) { - switch chain { - case shared.Ethereum: - return eth.NewCleaner(db), nil - case shared.Bitcoin: - return btc.NewCleaner(db), nil - default: - return nil, fmt.Errorf("invalid chain %s for cleaner constructor", chain.String()) - } -} diff --git a/pkg/super_node/eth/eth_suite_test.go b/pkg/super_node/eth/eth_suite_test.go deleted file mode 100644 index a2831e54..00000000 --- a/pkg/super_node/eth/eth_suite_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your 
option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package eth_test - -import ( - "io/ioutil" - "testing" - - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/sirupsen/logrus" -) - -func TestETHSuperNode(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Super Node ETH Suite Test") -} - -var _ = BeforeSuite(func() { - logrus.SetOutput(ioutil.Discard) -}) diff --git a/pkg/super_node/service.go b/pkg/super_node/service.go deleted file mode 100644 index 611eb85a..00000000 --- a/pkg/super_node/service.go +++ /dev/null @@ -1,521 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package super_node - -import ( - "fmt" - "sync" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/crypto" - "github.com/ethereum/go-ethereum/node" - "github.com/ethereum/go-ethereum/p2p" - "github.com/ethereum/go-ethereum/rlp" - "github.com/ethereum/go-ethereum/rpc" - log "github.com/sirupsen/logrus" - - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" -) - -const ( - PayloadChanBufferSize = 2000 -) - -// SuperNode is the top level interface for streaming, converting to IPLDs, publishing, -// and indexing all chain data; screening this data; and serving it up to subscribed clients -// This service is compatible with the Ethereum service interface (node.Service) -type SuperNode interface { - // APIs(), Protocols(), Start() and Stop() - node.Service - // Data processing event loop - Sync(wg *sync.WaitGroup, forwardPayloadChan chan<- shared.ConvertedData) error - // Pub-Sub handling event loop - Serve(wg *sync.WaitGroup, screenAndServePayload <-chan shared.ConvertedData) - // Method to subscribe to the service - Subscribe(id rpc.ID, sub chan<- SubscriptionPayload, quitChan chan<- bool, params shared.SubscriptionSettings) - // Method to unsubscribe from the service - Unsubscribe(id rpc.ID) - // Method to access the node info for the service - Node() *core.Node - // Method to access chain type - Chain() shared.ChainType -} - -// Service is the underlying struct for the super node -type Service struct { - // Used to sync access to the Subscriptions - sync.Mutex - // Interface for streaming payloads over an rpc subscription - Streamer shared.PayloadStreamer - // Interface for converting raw payloads into IPLD object payloads - Converter shared.PayloadConverter - // Interface for publishing the IPLD payloads to IPFS - Publisher shared.IPLDPublisher - // Interface for indexing the CIDs of the published IPLDs in Postgres - Indexer 
shared.CIDIndexer - // Interface for filtering and serving data according to subscribed clients according to their specification - Filterer shared.ResponseFilterer - // Interface for fetching IPLD objects from IPFS - IPLDFetcher shared.IPLDFetcher - // Interface for searching and retrieving CIDs from Postgres index - Retriever shared.CIDRetriever - // Chan the processor uses to subscribe to payloads from the Streamer - PayloadChan chan shared.RawChainData - // Used to signal shutdown of the service - QuitChan chan bool - // A mapping of rpc.IDs to their subscription channels, mapped to their subscription type (hash of the StreamFilters) - Subscriptions map[common.Hash]map[rpc.ID]Subscription - // A mapping of subscription params hash to the corresponding subscription params - SubscriptionTypes map[common.Hash]shared.SubscriptionSettings - // Info for the Geth node that this super node is working with - NodeInfo *core.Node - // Number of publishAndIndex workers - WorkerPoolSize int - // chain type for this service - chain shared.ChainType - // Path to ipfs data dir - ipfsPath string - // Underlying db - db *postgres.DB - // wg for syncing serve processes - serveWg *sync.WaitGroup -} - -// NewSuperNode creates a new super_node.Interface using an underlying super_node.Service struct -func NewSuperNode(settings *Config) (SuperNode, error) { - sn := new(Service) - var err error - // If we are syncing, initialize the needed interfaces - if settings.Sync { - sn.Streamer, sn.PayloadChan, err = NewPayloadStreamer(settings.Chain, settings.WSClient) - if err != nil { - return nil, err - } - sn.Converter, err = NewPayloadConverter(settings.Chain) - if err != nil { - return nil, err - } - sn.Publisher, err = NewIPLDPublisher(settings.Chain, settings.IPFSPath, settings.SyncDBConn, settings.IPFSMode) - if err != nil { - return nil, err - } - sn.Indexer, err = NewCIDIndexer(settings.Chain, settings.SyncDBConn, settings.IPFSMode) - if err != nil { - return nil, err - } - 
sn.Filterer, err = NewResponseFilterer(settings.Chain) - if err != nil { - return nil, err - } - } - // If we are serving, initialize the needed interfaces - if settings.Serve { - sn.Retriever, err = NewCIDRetriever(settings.Chain, settings.ServeDBConn) - if err != nil { - return nil, err - } - sn.IPLDFetcher, err = NewIPLDFetcher(settings.Chain, settings.IPFSPath, settings.ServeDBConn, settings.IPFSMode) - if err != nil { - return nil, err - } - sn.db = settings.ServeDBConn - } - sn.QuitChan = make(chan bool) - sn.Subscriptions = make(map[common.Hash]map[rpc.ID]Subscription) - sn.SubscriptionTypes = make(map[common.Hash]shared.SubscriptionSettings) - sn.WorkerPoolSize = settings.Workers - sn.NodeInfo = &settings.NodeInfo - sn.ipfsPath = settings.IPFSPath - sn.chain = settings.Chain - return sn, nil -} - -// Protocols exports the services p2p protocols, this service has none -func (sap *Service) Protocols() []p2p.Protocol { - return []p2p.Protocol{} -} - -// APIs returns the RPC descriptors the super node service offers -func (sap *Service) APIs() []rpc.API { - ifnoAPI := NewInfoAPI() - apis := []rpc.API{ - { - Namespace: APIName, - Version: APIVersion, - Service: NewPublicSuperNodeAPI(sap), - Public: true, - }, - { - Namespace: "rpc", - Version: APIVersion, - Service: ifnoAPI, - Public: true, - }, - { - Namespace: "net", - Version: APIVersion, - Service: ifnoAPI, - Public: true, - }, - { - Namespace: "admin", - Version: APIVersion, - Service: ifnoAPI, - Public: true, - }, - } - chainAPI, err := NewPublicAPI(sap.chain, sap.db, sap.ipfsPath) - if err != nil { - log.Error(err) - return apis - } - return append(apis, chainAPI) -} - -// Sync streams incoming raw chain data and converts it for further processing -// It forwards the converted data to the publishAndIndex process(es) it spins up -// If forwards the converted data to a ScreenAndServe process if it there is one listening on the passed screenAndServePayload channel -// This continues on no matter if or how 
many subscribers there are -func (sap *Service) Sync(wg *sync.WaitGroup, screenAndServePayload chan<- shared.ConvertedData) error { - sub, err := sap.Streamer.Stream(sap.PayloadChan) - if err != nil { - return err - } - // spin up publishAndIndex worker goroutines - publishAndIndexPayload := make(chan shared.ConvertedData, PayloadChanBufferSize) - for i := 1; i <= sap.WorkerPoolSize; i++ { - go sap.publishAndIndex(wg, i, publishAndIndexPayload) - log.Debugf("%s publishAndIndex worker %d successfully spun up", sap.chain.String(), i) - } - go func() { - wg.Add(1) - defer wg.Done() - for { - select { - case payload := <-sap.PayloadChan: - ipldPayload, err := sap.Converter.Convert(payload) - if err != nil { - log.Errorf("super node conversion error for chain %s: %v", sap.chain.String(), err) - continue - } - log.Infof("%s data streamed at head height %d", sap.chain.String(), ipldPayload.Height()) - // If we have a ScreenAndServe process running, forward the iplds to it - select { - case screenAndServePayload <- ipldPayload: - default: - } - // Forward the payload to the publishAndIndex workers - // this channel acts as a ring buffer - select { - case publishAndIndexPayload <- ipldPayload: - default: - <-publishAndIndexPayload - publishAndIndexPayload <- ipldPayload - } - case err := <-sub.Err(): - log.Errorf("super node subscription error for chain %s: %v", sap.chain.String(), err) - case <-sap.QuitChan: - log.Infof("quiting %s Sync process", sap.chain.String()) - return - } - } - }() - log.Infof("%s Sync goroutine successfully spun up", sap.chain.String()) - return nil -} - -// publishAndIndex is spun up by SyncAndConvert and receives converted chain data from that process -// it publishes this data to IPFS and indexes their CIDs with useful metadata in Postgres -func (sap *Service) publishAndIndex(wg *sync.WaitGroup, id int, publishAndIndexPayload <-chan shared.ConvertedData) { - wg.Add(1) - defer wg.Done() - for { - select { - case payload := 
<-publishAndIndexPayload: - log.Debugf("%s super node publishAndIndex worker %d publishing data streamed at head height %d", sap.chain.String(), id, payload.Height()) - cidPayload, err := sap.Publisher.Publish(payload) - if err != nil { - log.Errorf("%s super node publishAndIndex worker %d publishing error: %v", sap.chain.String(), id, err) - continue - } - log.Debugf("%s super node publishAndIndex worker %d indexing data streamed at head height %d", sap.chain.String(), id, payload.Height()) - if err := sap.Indexer.Index(cidPayload); err != nil { - log.Errorf("%s super node publishAndIndex worker %d indexing error: %v", sap.chain.String(), id, err) - } - case <-sap.QuitChan: - log.Infof("%s super node publishAndIndex worker %d shutting down", sap.chain.String(), id) - return - } - } -} - -// Serve listens for incoming converter data off the screenAndServePayload from the Sync process -// It filters and sends this data to any subscribers to the service -// This process can also be stood up alone, without an screenAndServePayload attached to a Sync process -// and it will hang on the WaitGroup indefinitely, allowing the Service to serve historical data requests only -func (sap *Service) Serve(wg *sync.WaitGroup, screenAndServePayload <-chan shared.ConvertedData) { - sap.serveWg = wg - go func() { - wg.Add(1) - defer wg.Done() - for { - select { - case payload := <-screenAndServePayload: - sap.filterAndServe(payload) - case <-sap.QuitChan: - log.Infof("quiting %s Serve process", sap.chain.String()) - return - } - } - }() - log.Infof("%s Serve goroutine successfully spun up", sap.chain.String()) -} - -// filterAndServe filters the payload according to each subscription type and sends to the subscriptions -func (sap *Service) filterAndServe(payload shared.ConvertedData) { - log.Debugf("sending %s payload to subscriptions", sap.chain.String()) - sap.Lock() - sap.serveWg.Add(1) - defer sap.Unlock() - defer sap.serveWg.Done() - for ty, subs := range sap.Subscriptions { - 
// Retrieve the subscription parameters for this subscription type - subConfig, ok := sap.SubscriptionTypes[ty] - if !ok { - log.Errorf("super node %s subscription configuration for subscription type %s not available", sap.chain.String(), ty.Hex()) - sap.closeType(ty) - continue - } - if subConfig.EndingBlock().Int64() > 0 && subConfig.EndingBlock().Int64() < payload.Height() { - // We are not out of range for this subscription type - // close it, and continue to the next - sap.closeType(ty) - continue - } - response, err := sap.Filterer.Filter(subConfig, payload) - if err != nil { - log.Errorf("super node filtering error for chain %s: %v", sap.chain.String(), err) - sap.closeType(ty) - continue - } - responseRLP, err := rlp.EncodeToBytes(response) - if err != nil { - log.Errorf("super node rlp encoding error for chain %s: %v", sap.chain.String(), err) - continue - } - for id, sub := range subs { - select { - case sub.PayloadChan <- SubscriptionPayload{Data: responseRLP, Err: "", Flag: EmptyFlag, Height: response.Height()}: - log.Debugf("sending super node %s payload to subscription %s", sap.chain.String(), id) - default: - log.Infof("unable to send %s payload to subscription %s; channel has no receiver", sap.chain.String(), id) - } - } - } -} - -// Subscribe is used by the API to remotely subscribe to the service loop -// The params must be rlp serializable and satisfy the SubscriptionSettings() interface -func (sap *Service) Subscribe(id rpc.ID, sub chan<- SubscriptionPayload, quitChan chan<- bool, params shared.SubscriptionSettings) { - sap.serveWg.Add(1) - defer sap.serveWg.Done() - log.Infof("New %s subscription %s", sap.chain.String(), id) - subscription := Subscription{ - ID: id, - PayloadChan: sub, - QuitChan: quitChan, - } - if params.ChainType() != sap.chain { - sendNonBlockingErr(subscription, fmt.Errorf("subscription %s is for chain %s, service supports chain %s", id, params.ChainType().String(), sap.chain.String())) - sendNonBlockingQuit(subscription) 
- return - } - // Subscription type is defined as the hash of the rlp-serialized subscription settings - by, err := rlp.EncodeToBytes(params) - if err != nil { - sendNonBlockingErr(subscription, err) - sendNonBlockingQuit(subscription) - return - } - subscriptionType := crypto.Keccak256Hash(by) - if !params.HistoricalDataOnly() { - // Add subscriber - sap.Lock() - if sap.Subscriptions[subscriptionType] == nil { - sap.Subscriptions[subscriptionType] = make(map[rpc.ID]Subscription) - } - sap.Subscriptions[subscriptionType][id] = subscription - sap.SubscriptionTypes[subscriptionType] = params - sap.Unlock() - } - // If the subscription requests a backfill, use the Postgres index to lookup and retrieve historical data - // Otherwise we only filter new data as it is streamed in from the state diffing geth node - if params.HistoricalData() || params.HistoricalDataOnly() { - if err := sap.sendHistoricalData(subscription, id, params); err != nil { - sendNonBlockingErr(subscription, fmt.Errorf("%s super node subscriber backfill error: %v", sap.chain.String(), err)) - sendNonBlockingQuit(subscription) - return - } - } -} - -// sendHistoricalData sends historical data to the requesting subscription -func (sap *Service) sendHistoricalData(sub Subscription, id rpc.ID, params shared.SubscriptionSettings) error { - log.Infof("Sending %s historical data to subscription %s", sap.chain.String(), id) - // Retrieve cached CIDs relevant to this subscriber - var endingBlock int64 - var startingBlock int64 - var err error - startingBlock, err = sap.Retriever.RetrieveFirstBlockNumber() - if err != nil { - return err - } - if startingBlock < params.StartingBlock().Int64() { - startingBlock = params.StartingBlock().Int64() - } - endingBlock, err = sap.Retriever.RetrieveLastBlockNumber() - if err != nil { - return err - } - if endingBlock > params.EndingBlock().Int64() && params.EndingBlock().Int64() > 0 && params.EndingBlock().Int64() > startingBlock { - endingBlock = 
params.EndingBlock().Int64() - } - log.Debugf("%s historical data starting block: %d", sap.chain.String(), params.StartingBlock().Int64()) - log.Debugf("%s historical data ending block: %d", sap.chain.String(), endingBlock) - go func() { - sap.serveWg.Add(1) - defer sap.serveWg.Done() - for i := startingBlock; i <= endingBlock; i++ { - select { - case <-sap.QuitChan: - log.Infof("%s super node historical data feed to subscription %s closed", sap.chain.String(), id) - return - default: - } - cidWrappers, empty, err := sap.Retriever.Retrieve(params, i) - if err != nil { - sendNonBlockingErr(sub, fmt.Errorf(" %s super node CID Retrieval error at block %d\r%s", sap.chain.String(), i, err.Error())) - continue - } - if empty { - continue - } - for _, cids := range cidWrappers { - response, err := sap.IPLDFetcher.Fetch(cids) - if err != nil { - sendNonBlockingErr(sub, fmt.Errorf("%s super node IPLD Fetching error at block %d\r%s", sap.chain.String(), i, err.Error())) - continue - } - responseRLP, err := rlp.EncodeToBytes(response) - if err != nil { - log.Error(err) - continue - } - select { - case sub.PayloadChan <- SubscriptionPayload{Data: responseRLP, Err: "", Flag: EmptyFlag, Height: response.Height()}: - log.Debugf("sending super node historical data payload to %s subscription %s", sap.chain.String(), id) - default: - log.Infof("unable to send backFill payload to %s subscription %s; channel has no receiver", sap.chain.String(), id) - } - } - } - // when we are done backfilling send an empty payload signifying so in the msg - select { - case sub.PayloadChan <- SubscriptionPayload{Data: nil, Err: "", Flag: BackFillCompleteFlag}: - log.Debugf("sending backFill completion notice to %s subscription %s", sap.chain.String(), id) - default: - log.Infof("unable to send backFill completion notice to %s subscription %s", sap.chain.String(), id) - } - }() - return nil -} - -// Unsubscribe is used by the API to remotely unsubscribe to the StateDiffingService loop -func (sap 
*Service) Unsubscribe(id rpc.ID) { - log.Infof("Unsubscribing %s from the %s super node service", id, sap.chain.String()) - sap.Lock() - for ty := range sap.Subscriptions { - delete(sap.Subscriptions[ty], id) - if len(sap.Subscriptions[ty]) == 0 { - // If we removed the last subscription of this type, remove the subscription type outright - delete(sap.Subscriptions, ty) - delete(sap.SubscriptionTypes, ty) - } - } - sap.Unlock() -} - -// Start is used to begin the service -// This is mostly just to satisfy the node.Service interface -func (sap *Service) Start(*p2p.Server) error { - log.Infof("Starting %s super node service", sap.chain.String()) - wg := new(sync.WaitGroup) - payloadChan := make(chan shared.ConvertedData, PayloadChanBufferSize) - if err := sap.Sync(wg, payloadChan); err != nil { - return err - } - sap.Serve(wg, payloadChan) - return nil -} - -// Stop is used to close down the service -// This is mostly just to satisfy the node.Service interface -func (sap *Service) Stop() error { - log.Infof("Stopping %s super node service", sap.chain.String()) - sap.Lock() - close(sap.QuitChan) - sap.close() - sap.Unlock() - return nil -} - -// Node returns the node info for this service -func (sap *Service) Node() *core.Node { - return sap.NodeInfo -} - -// Chain returns the chain type for this service -func (sap *Service) Chain() shared.ChainType { - return sap.chain -} - -// close is used to close all listening subscriptions -// close needs to be called with subscription access locked -func (sap *Service) close() { - log.Infof("Closing all %s subscriptions", sap.chain.String()) - for subType, subs := range sap.Subscriptions { - for _, sub := range subs { - sendNonBlockingQuit(sub) - } - delete(sap.Subscriptions, subType) - delete(sap.SubscriptionTypes, subType) - } -} - -// closeType is used to close all subscriptions of given type -// closeType needs to be called with subscription access locked -func (sap *Service) closeType(subType common.Hash) { - 
log.Infof("Closing all %s subscriptions of type %s", sap.chain.String(), subType.String()) - subs := sap.Subscriptions[subType] - for _, sub := range subs { - sendNonBlockingQuit(sub) - } - delete(sap.Subscriptions, subType) - delete(sap.SubscriptionTypes, subType) -} diff --git a/pkg/wasm/instantiator.go b/pkg/wasm/instantiator.go deleted file mode 100644 index 9f9c2065..00000000 --- a/pkg/wasm/instantiator.go +++ /dev/null @@ -1,56 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package wasm - -import ( - "github.com/vulcanize/vulcanizedb/pkg/postgres" -) - -// Instantiator is used to instantiate WASM functions in Postgres -type Instantiator struct { - db *postgres.DB - instances []WasmFunction // WASM file paths and namespaces -} - -type WasmFunction struct { - BinaryPath string - Namespace string -} - -// NewWASMInstantiator returns a pointer to a new Instantiator -func NewWASMInstantiator(db *postgres.DB, instances []WasmFunction) *Instantiator { - return &Instantiator{ - db: db, - instances: instances, - } -} - -// Instantiate is used to load the WASM functions into Postgres -func (i *Instantiator) Instantiate() error { - // TODO: enable instantiation of WASM functions from IPFS - tx, err := i.db.Beginx() - if err != nil { - return err - } - for _, pn := range i.instances { - _, err := tx.Exec(`SELECT wasm_new_instance('$1', '$2')`, pn.BinaryPath, pn.Namespace) - if err != nil { - return err - } - } - return tx.Commit() -} diff --git a/pkg/super_node/api.go b/pkg/watcher/api.go similarity index 94% rename from pkg/super_node/api.go rename to pkg/watcher/api.go index ff74cdde..fde767e5 100644 --- a/pkg/super_node/api.go +++ b/pkg/watcher/api.go @@ -14,7 +14,7 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see . 
-package super_node +package watcher import ( "context" @@ -24,11 +24,11 @@ import ( "github.com/ethereum/go-ethereum/rpc" log "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/super_node/btc" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" - v "github.com/vulcanize/vulcanizedb/version" + "github.com/vulcanize/ipfs-chain-watcher/pkg/btc" + "github.com/vulcanize/ipfs-chain-watcher/pkg/core" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" + v "github.com/vulcanize/ipfs-chain-watcher/version" ) // APIName is the namespace used for the state diffing service API diff --git a/pkg/super_node/backfiller.go b/pkg/watcher/backfiller.go similarity index 98% rename from pkg/super_node/backfiller.go rename to pkg/watcher/backfiller.go index 588a9961..5263d3b9 100644 --- a/pkg/super_node/backfiller.go +++ b/pkg/watcher/backfiller.go @@ -14,7 +14,7 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see . -package super_node +package watcher import ( "sync" @@ -22,8 +22,8 @@ import ( log "github.com/sirupsen/logrus" - utils "github.com/vulcanize/vulcanizedb/libraries/shared/utilities" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" + "github.com/vulcanize/ipfs-chain-watcher/utils" ) const ( diff --git a/pkg/super_node/backfiller_test.go b/pkg/watcher/backfiller_test.go similarity index 89% rename from pkg/super_node/backfiller_test.go rename to pkg/watcher/backfiller_test.go index e0e804ff..99b2910a 100644 --- a/pkg/super_node/backfiller_test.go +++ b/pkg/watcher/backfiller_test.go @@ -14,7 +14,7 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see . 
-package super_node_test +package watcher_test import ( "sync" @@ -23,11 +23,11 @@ import ( . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/super_node" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth/mocks" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" - mocks2 "github.com/vulcanize/vulcanizedb/pkg/super_node/shared/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" + mocks2 "github.com/vulcanize/ipfs-chain-watcher/pkg/shared/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/watcher" ) var _ = Describe("BackFiller", func() { @@ -59,15 +59,15 @@ var _ = Describe("BackFiller", func() { }, } quitChan := make(chan bool, 1) - backfiller := &super_node.BackFillService{ + backfiller := &watcher.BackFillService{ Indexer: mockCidRepo, Publisher: mockPublisher, Converter: mockConverter, Fetcher: mockFetcher, Retriever: mockRetriever, GapCheckFrequency: time.Second * 2, - BatchSize: super_node.DefaultMaxBatchSize, - BatchNumber: super_node.DefaultMaxBatchNumber, + BatchSize: watcher.DefaultMaxBatchSize, + BatchNumber: watcher.DefaultMaxBatchNumber, QuitChan: quitChan, } wg := &sync.WaitGroup{} @@ -114,15 +114,15 @@ var _ = Describe("BackFiller", func() { }, } quitChan := make(chan bool, 1) - backfiller := &super_node.BackFillService{ + backfiller := &watcher.BackFillService{ Indexer: mockCidRepo, Publisher: mockPublisher, Converter: mockConverter, Fetcher: mockFetcher, Retriever: mockRetriever, GapCheckFrequency: time.Second * 2, - BatchSize: super_node.DefaultMaxBatchSize, - BatchNumber: super_node.DefaultMaxBatchNumber, + BatchSize: watcher.DefaultMaxBatchSize, + BatchNumber: watcher.DefaultMaxBatchNumber, QuitChan: quitChan, } wg := &sync.WaitGroup{} @@ -168,15 +168,15 @@ var _ = Describe("BackFiller", func() { }, } quitChan := 
make(chan bool, 1) - backfiller := &super_node.BackFillService{ + backfiller := &watcher.BackFillService{ Indexer: mockCidRepo, Publisher: mockPublisher, Converter: mockConverter, Fetcher: mockFetcher, Retriever: mockRetriever, GapCheckFrequency: time.Second * 2, - BatchSize: super_node.DefaultMaxBatchSize, - BatchNumber: super_node.DefaultMaxBatchNumber, + BatchSize: watcher.DefaultMaxBatchSize, + BatchNumber: watcher.DefaultMaxBatchNumber, QuitChan: quitChan, } wg := &sync.WaitGroup{} diff --git a/pkg/watcher/btc/repository.go b/pkg/watcher/btc/repository.go deleted file mode 100644 index 07482785..00000000 --- a/pkg/watcher/btc/repository.go +++ /dev/null @@ -1 +0,0 @@ -package btc diff --git a/pkg/watcher/config.go b/pkg/watcher/config.go index 8c292ae0..b6a03aa7 100644 --- a/pkg/watcher/config.go +++ b/pkg/watcher/config.go @@ -17,122 +17,253 @@ package watcher import ( - "context" - "errors" "fmt" + "os" + "path/filepath" + "time" - "github.com/vulcanize/vulcanizedb/pkg/wasm" - - "github.com/ethereum/go-ethereum/rpc" "github.com/spf13/viper" - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/eth/client" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node/btc" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" - shared2 "github.com/vulcanize/vulcanizedb/pkg/watcher/shared" - "github.com/vulcanize/vulcanizedb/utils" + "github.com/vulcanize/ipfs-chain-watcher/pkg/config" + "github.com/vulcanize/ipfs-chain-watcher/pkg/core" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" + "github.com/vulcanize/ipfs-chain-watcher/utils" ) -// Config holds all of the parameters necessary for defining and running an instance of a watcher +// Env variables +const ( + SUPERNODE_CHAIN = "SUPERNODE_CHAIN" + SUPERNODE_SYNC = 
"SUPERNODE_SYNC" + SUPERNODE_WORKERS = "SUPERNODE_WORKERS" + SUPERNODE_SERVER = "SUPERNODE_SERVER" + SUPERNODE_WS_PATH = "SUPERNODE_WS_PATH" + SUPERNODE_IPC_PATH = "SUPERNODE_IPC_PATH" + SUPERNODE_HTTP_PATH = "SUPERNODE_HTTP_PATH" + SUPERNODE_BACKFILL = "SUPERNODE_BACKFILL" + SUPERNODE_FREQUENCY = "SUPERNODE_FREQUENCY" + SUPERNODE_BATCH_SIZE = "SUPERNODE_BATCH_SIZE" + SUPERNODE_BATCH_NUMBER = "SUPERNODE_BATCH_NUMBER" + SUPERNODE_VALIDATION_LEVEL = "SUPERNODE_VALIDATION_LEVEL" + + SYNC_MAX_IDLE_CONNECTIONS = "SYNC_MAX_IDLE_CONNECTIONS" + SYNC_MAX_OPEN_CONNECTIONS = "SYNC_MAX_OPEN_CONNECTIONS" + SYNC_MAX_CONN_LIFETIME = "SYNC_MAX_CONN_LIFETIME" + + BACKFILL_MAX_IDLE_CONNECTIONS = "BACKFILL_MAX_IDLE_CONNECTIONS" + BACKFILL_MAX_OPEN_CONNECTIONS = "BACKFILL_MAX_OPEN_CONNECTIONS" + BACKFILL_MAX_CONN_LIFETIME = "BACKFILL_MAX_CONN_LIFETIME" + + SERVER_MAX_IDLE_CONNECTIONS = "SERVER_MAX_IDLE_CONNECTIONS" + SERVER_MAX_OPEN_CONNECTIONS = "SERVER_MAX_OPEN_CONNECTIONS" + SERVER_MAX_CONN_LIFETIME = "SERVER_MAX_CONN_LIFETIME" +) + +// Config struct type Config struct { - // Subscription settings - SubscriptionConfig shared.SubscriptionSettings - // Database settings + // Ubiquitous fields + Chain shared.ChainType + IPFSPath string + IPFSMode shared.IPFSMode DBConfig config.Database - // DB itself - DB *postgres.DB - // Subscription client - Client interface{} - // WASM instantiation paths and namespaces - WASMFunctions []wasm.WasmFunction - // File paths for trigger functions (sql files) that (can) use the instantiated wasm namespaces - TriggerFunctions []string - // Chain type used to specify what type of raw data we will be processing - Chain shared.ChainType - // Source type used to specify which streamer to use based on what API we will be interfacing with - Source shared2.SourceType - // Info for the node - NodeInfo core.Node + // Server fields + Serve bool + ServeDBConn *postgres.DB + WSEndpoint string + HTTPEndpoint string + IPCEndpoint string + // Sync params + Sync bool 
+ SyncDBConn *postgres.DB + Workers int + WSClient interface{} + NodeInfo core.Node + // Backfiller params + BackFill bool + BackFillDBConn *postgres.DB + HTTPClient interface{} + Frequency time.Duration + BatchSize uint64 + BatchNumber uint64 + ValidationLevel int + Timeout time.Duration // HTTP connection timeout in seconds } -func NewWatcherConfig() (*Config, error) { +// NewSuperNodeConfig is used to initialize a SuperNode config from a .toml file +// Separate chain supernode instances need to be ran with separate ipfs path in order to avoid lock contention on the ipfs repository lockfile +func NewSuperNodeConfig() (*Config, error) { c := new(Config) var err error - chain := viper.GetString("watcher.chain") + + viper.BindEnv("superNode.chain", SUPERNODE_CHAIN) + viper.BindEnv("superNode.sync", SUPERNODE_SYNC) + viper.BindEnv("superNode.workers", SUPERNODE_WORKERS) + viper.BindEnv("ethereum.wsPath", shared.ETH_WS_PATH) + viper.BindEnv("bitcoin.wsPath", shared.BTC_WS_PATH) + viper.BindEnv("superNode.server", SUPERNODE_SERVER) + viper.BindEnv("superNode.wsPath", SUPERNODE_WS_PATH) + viper.BindEnv("superNode.ipcPath", SUPERNODE_IPC_PATH) + viper.BindEnv("superNode.httpPath", SUPERNODE_HTTP_PATH) + viper.BindEnv("superNode.backFill", SUPERNODE_BACKFILL) + + chain := viper.GetString("superNode.chain") c.Chain, err = shared.NewChainType(chain) if err != nil { return nil, err } - switch c.Chain { - case shared.Ethereum: - c.SubscriptionConfig, err = eth.NewEthSubscriptionConfig() - if err != nil { - return nil, err - } - case shared.Bitcoin: - c.SubscriptionConfig, err = btc.NewBtcSubscriptionConfig() - if err != nil { - return nil, err - } - case shared.Omni: - return nil, errors.New("omni chain type currently not supported") - default: - return nil, fmt.Errorf("unexpected chain type %s", c.Chain.String()) - } - sourcePath := viper.GetString("watcher.dataSource") - if sourcePath == "" { - sourcePath = "ws://127.0.0.1:8080" // default to and try the default ws url if 
no path is provided - } - sourceType := viper.GetString("watcher.dataPath") - c.Source, err = shared2.NewSourceType(sourceType) + + c.IPFSMode, err = shared.GetIPFSMode() if err != nil { return nil, err } - switch c.Source { - case shared2.Ethereum: - return nil, errors.New("ethereum data source currently not supported") - case shared2.Bitcoin: - return nil, errors.New("bitcoin data source currently not supported") - case shared2.VulcanizeDB: - rawRPCClient, err := rpc.Dial(sourcePath) + if c.IPFSMode == shared.LocalInterface || c.IPFSMode == shared.RemoteClient { + c.IPFSPath, err = shared.GetIPFSPath() if err != nil { return nil, err } - cli := client.NewRPCClient(rawRPCClient, sourcePath) - var nodeInfo core.Node - if err := cli.CallContext(context.Background(), &nodeInfo, "vdb_node"); err != nil { + } + + c.DBConfig.Init() + + c.Sync = viper.GetBool("superNode.sync") + if c.Sync { + workers := viper.GetInt("superNode.workers") + if workers < 1 { + workers = 1 + } + c.Workers = workers + switch c.Chain { + case shared.Ethereum: + ethWS := viper.GetString("ethereum.wsPath") + c.NodeInfo, c.WSClient, err = shared.GetEthNodeAndClient(fmt.Sprintf("ws://%s", ethWS)) + if err != nil { + return nil, err + } + case shared.Bitcoin: + btcWS := viper.GetString("bitcoin.wsPath") + c.NodeInfo, c.WSClient = shared.GetBtcNodeAndClient(btcWS) + } + syncDBConn := overrideDBConnConfig(c.DBConfig, Sync) + syncDB := utils.LoadPostgres(syncDBConn, c.NodeInfo) + c.SyncDBConn = &syncDB + } + + c.Serve = viper.GetBool("superNode.server") + if c.Serve { + wsPath := viper.GetString("superNode.wsPath") + if wsPath == "" { + wsPath = "127.0.0.1:8080" + } + c.WSEndpoint = wsPath + ipcPath := viper.GetString("superNode.ipcPath") + if ipcPath == "" { + home, err := os.UserHomeDir() + if err != nil { + return nil, err + } + ipcPath = filepath.Join(home, ".vulcanize/vulcanize.ipc") + } + c.IPCEndpoint = ipcPath + httpPath := viper.GetString("superNode.httpPath") + if httpPath == "" { + httpPath 
= "127.0.0.1:8081" + } + c.HTTPEndpoint = httpPath + serveDBConn := overrideDBConnConfig(c.DBConfig, Serve) + serveDB := utils.LoadPostgres(serveDBConn, c.NodeInfo) + c.ServeDBConn = &serveDB + } + + c.BackFill = viper.GetBool("superNode.backFill") + if c.BackFill { + if err := c.BackFillFields(); err != nil { return nil, err } - c.NodeInfo = nodeInfo - c.Client = cli - default: - return nil, fmt.Errorf("unexpected data source type %s", c.Source.String()) } - wasmBinaries := viper.GetStringSlice("watcher.wasmBinaries") - wasmNamespaces := viper.GetStringSlice("watcher.wasmNamespaces") - if len(wasmBinaries) != len(wasmNamespaces) { - return nil, fmt.Errorf("watcher config needs a namespace for every wasm binary\r\nhave %d binaries and %d namespaces", len(wasmBinaries), len(wasmNamespaces)) - } - c.WASMFunctions = make([]wasm.WasmFunction, len(wasmBinaries)) - for i, bin := range wasmBinaries { - c.WASMFunctions[i] = wasm.WasmFunction{ - BinaryPath: bin, - Namespace: wasmNamespaces[i], - } - } - c.TriggerFunctions = viper.GetStringSlice("watcher.triggerFunctions") - c.DBConfig = config.Database{ - Name: viper.GetString("watcher.database.name"), - Hostname: viper.GetString("watcher.database.hostname"), - Port: viper.GetInt("watcher.database.port"), - User: viper.GetString("watcher.database.user"), - Password: viper.GetString("watcher.database.password"), - } - db := utils.LoadPostgres(c.DBConfig, c.NodeInfo) - c.DB = &db + return c, nil } + +// BackFillFields is used to fill in the BackFill fields of the config +func (c *Config) BackFillFields() error { + var err error + + viper.BindEnv("ethereum.httpPath", shared.ETH_HTTP_PATH) + viper.BindEnv("bitcoin.httpPath", shared.BTC_HTTP_PATH) + viper.BindEnv("superNode.frequency", SUPERNODE_FREQUENCY) + viper.BindEnv("superNode.batchSize", SUPERNODE_BATCH_SIZE) + viper.BindEnv("superNode.batchNumber", SUPERNODE_BATCH_NUMBER) + viper.BindEnv("superNode.validationLevel", SUPERNODE_VALIDATION_LEVEL) + 
viper.BindEnv("superNode.timeout", shared.HTTP_TIMEOUT) + + timeout := viper.GetInt("superNode.timeout") + if timeout < 15 { + timeout = 15 + } + c.Timeout = time.Second * time.Duration(timeout) + + switch c.Chain { + case shared.Ethereum: + ethHTTP := viper.GetString("ethereum.httpPath") + c.NodeInfo, c.HTTPClient, err = shared.GetEthNodeAndClient(fmt.Sprintf("http://%s", ethHTTP)) + if err != nil { + return err + } + case shared.Bitcoin: + btcHTTP := viper.GetString("bitcoin.httpPath") + c.NodeInfo, c.HTTPClient = shared.GetBtcNodeAndClient(btcHTTP) + } + + freq := viper.GetInt("superNode.frequency") + var frequency time.Duration + if freq <= 0 { + frequency = time.Second * 30 + } else { + frequency = time.Second * time.Duration(freq) + } + c.Frequency = frequency + c.BatchSize = uint64(viper.GetInt64("superNode.batchSize")) + c.BatchNumber = uint64(viper.GetInt64("superNode.batchNumber")) + c.ValidationLevel = viper.GetInt("superNode.validationLevel") + + backFillDBConn := overrideDBConnConfig(c.DBConfig, BackFill) + backFillDB := utils.LoadPostgres(backFillDBConn, c.NodeInfo) + c.BackFillDBConn = &backFillDB + return nil +} + +type mode string + +var ( + Sync mode = "sync" + BackFill mode = "backFill" + Serve mode = "serve" +) + +func overrideDBConnConfig(con config.Database, m mode) config.Database { + switch m { + case Sync: + viper.BindEnv("database.sync.maxIdle", SYNC_MAX_IDLE_CONNECTIONS) + viper.BindEnv("database.sync.maxOpen", SYNC_MAX_OPEN_CONNECTIONS) + viper.BindEnv("database.sync.maxLifetime", SYNC_MAX_CONN_LIFETIME) + con.MaxIdle = viper.GetInt("database.sync.maxIdle") + con.MaxOpen = viper.GetInt("database.sync.maxOpen") + con.MaxLifetime = viper.GetInt("database.sync.maxLifetime") + case BackFill: + viper.BindEnv("database.backFill.maxIdle", BACKFILL_MAX_IDLE_CONNECTIONS) + viper.BindEnv("database.backFill.maxOpen", BACKFILL_MAX_OPEN_CONNECTIONS) + viper.BindEnv("database.backFill.maxLifetime", BACKFILL_MAX_CONN_LIFETIME) + con.MaxIdle = 
viper.GetInt("database.backFill.maxIdle") + con.MaxOpen = viper.GetInt("database.backFill.maxOpen") + con.MaxLifetime = viper.GetInt("database.backFill.maxLifetime") + case Serve: + viper.BindEnv("database.server.maxIdle", SERVER_MAX_IDLE_CONNECTIONS) + viper.BindEnv("database.server.maxOpen", SERVER_MAX_OPEN_CONNECTIONS) + viper.BindEnv("database.server.maxLifetime", SERVER_MAX_CONN_LIFETIME) + con.MaxIdle = viper.GetInt("database.server.maxIdle") + con.MaxOpen = viper.GetInt("database.server.maxOpen") + con.MaxLifetime = viper.GetInt("database.server.maxLifetime") + default: + } + return con +} diff --git a/pkg/watcher/constructors.go b/pkg/watcher/constructors.go index f8112c49..7564694e 100644 --- a/pkg/watcher/constructors.go +++ b/pkg/watcher/constructors.go @@ -18,36 +18,202 @@ package watcher import ( "fmt" + "time" - "github.com/vulcanize/vulcanizedb/libraries/shared/streamer" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - shared2 "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" - "github.com/vulcanize/vulcanizedb/pkg/watcher/eth" - "github.com/vulcanize/vulcanizedb/pkg/watcher/shared" + "github.com/btcsuite/btcd/chaincfg" + "github.com/btcsuite/btcd/rpcclient" + "github.com/ethereum/go-ethereum/params" + "github.com/ethereum/go-ethereum/rpc" + + "github.com/vulcanize/ipfs-chain-watcher/pkg/btc" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) -// NewSuperNodeStreamer returns a new shared.SuperNodeStreamer -func NewSuperNodeStreamer(source shared.SourceType, client interface{}) (shared.SuperNodeStreamer, error) { - switch source { - case shared.VulcanizeDB: - cli, ok := client.(core.RPCClient) - if !ok { - var expectedClientType core.RPCClient - return nil, fmt.Errorf("vulcanizedb NewSuperNodeStreamer construct expects client type %T got %T", expectedClientType, client) - } - 
return streamer.NewSuperNodeStreamer(cli), nil +// NewResponseFilterer constructs a ResponseFilterer for the provided chain type +func NewResponseFilterer(chain shared.ChainType) (shared.ResponseFilterer, error) { + switch chain { + case shared.Ethereum: + return eth.NewResponseFilterer(), nil + case shared.Bitcoin: + return btc.NewResponseFilterer(), nil default: - return nil, fmt.Errorf("NewSuperNodeStreamer constructor unexpected souce type %s", source.String()) + return nil, fmt.Errorf("invalid chain %s for filterer constructor", chain.String()) } } -// NewRepository constructs and returns a new Repository that satisfies the shared.Repository interface for the specified chain -func NewRepository(chain shared2.ChainType, db *postgres.DB, triggerFuncs []string) (shared.Repository, error) { +// NewCIDIndexer constructs a CIDIndexer for the provided chain type +func NewCIDIndexer(chain shared.ChainType, db *postgres.DB, ipfsMode shared.IPFSMode) (shared.CIDIndexer, error) { switch chain { - case shared2.Ethereum: - return eth.NewRepository(db, triggerFuncs), nil + case shared.Ethereum: + switch ipfsMode { + case shared.LocalInterface, shared.RemoteClient: + return eth.NewCIDIndexer(db), nil + case shared.DirectPostgres: + return eth.NewIPLDPublisherAndIndexer(db), nil + default: + return nil, fmt.Errorf("ethereum CIDIndexer unexpected ipfs mode %s", ipfsMode.String()) + } + case shared.Bitcoin: + switch ipfsMode { + case shared.LocalInterface, shared.RemoteClient: + return btc.NewCIDIndexer(db), nil + case shared.DirectPostgres: + return eth.NewIPLDPublisherAndIndexer(db), nil + default: + return nil, fmt.Errorf("bitcoin CIDIndexer unexpected ipfs mode %s", ipfsMode.String()) + } default: - return nil, fmt.Errorf("NewRepository constructor unexpected chain type %s", chain.String()) + return nil, fmt.Errorf("invalid chain %s for indexer constructor", chain.String()) + } +} + +// NewCIDRetriever constructs a CIDRetriever for the provided chain type +func 
NewCIDRetriever(chain shared.ChainType, db *postgres.DB) (shared.CIDRetriever, error) { + switch chain { + case shared.Ethereum: + return eth.NewCIDRetriever(db), nil + case shared.Bitcoin: + return btc.NewCIDRetriever(db), nil + default: + return nil, fmt.Errorf("invalid chain %s for retriever constructor", chain.String()) + } +} + +// NewPayloadStreamer constructs a PayloadStreamer for the provided chain type +func NewPayloadStreamer(chain shared.ChainType, clientOrConfig interface{}) (shared.PayloadStreamer, chan shared.RawChainData, error) { + switch chain { + case shared.Ethereum: + ethClient, ok := clientOrConfig.(*rpc.Client) + if !ok { + return nil, nil, fmt.Errorf("ethereum payload streamer constructor expected client type %T got %T", &rpc.Client{}, clientOrConfig) + } + streamChan := make(chan shared.RawChainData, eth.PayloadChanBufferSize) + return eth.NewPayloadStreamer(ethClient), streamChan, nil + case shared.Bitcoin: + btcClientConn, ok := clientOrConfig.(*rpcclient.ConnConfig) + if !ok { + return nil, nil, fmt.Errorf("bitcoin payload streamer constructor expected client config type %T got %T", rpcclient.ConnConfig{}, clientOrConfig) + } + streamChan := make(chan shared.RawChainData, btc.PayloadChanBufferSize) + return btc.NewHTTPPayloadStreamer(btcClientConn), streamChan, nil + default: + return nil, nil, fmt.Errorf("invalid chain %s for streamer constructor", chain.String()) + } +} + +// NewPaylaodFetcher constructs a PayloadFetcher for the provided chain type +func NewPaylaodFetcher(chain shared.ChainType, client interface{}, timeout time.Duration) (shared.PayloadFetcher, error) { + switch chain { + case shared.Ethereum: + batchClient, ok := client.(*rpc.Client) + if !ok { + return nil, fmt.Errorf("ethereum payload fetcher constructor expected client type %T got %T", &rpc.Client{}, client) + } + return eth.NewPayloadFetcher(batchClient, timeout), nil + case shared.Bitcoin: + connConfig, ok := client.(*rpcclient.ConnConfig) + if !ok { + return nil, 
fmt.Errorf("bitcoin payload fetcher constructor expected client type %T got %T", &rpcclient.Client{}, client) + } + return btc.NewPayloadFetcher(connConfig) + default: + return nil, fmt.Errorf("invalid chain %s for payload fetcher constructor", chain.String()) + } +} + +// NewPayloadConverter constructs a PayloadConverter for the provided chain type +func NewPayloadConverter(chain shared.ChainType) (shared.PayloadConverter, error) { + switch chain { + case shared.Ethereum: + return eth.NewPayloadConverter(params.MainnetChainConfig), nil + case shared.Bitcoin: + return btc.NewPayloadConverter(&chaincfg.MainNetParams), nil + default: + return nil, fmt.Errorf("invalid chain %s for converter constructor", chain.String()) + } +} + +// NewIPLDFetcher constructs an IPLDFetcher for the provided chain type +func NewIPLDFetcher(chain shared.ChainType, ipfsPath string, db *postgres.DB, ipfsMode shared.IPFSMode) (shared.IPLDFetcher, error) { + switch chain { + case shared.Ethereum: + switch ipfsMode { + case shared.LocalInterface, shared.RemoteClient: + return eth.NewIPLDFetcher(ipfsPath) + case shared.DirectPostgres: + return eth.NewIPLDPGFetcher(db), nil + default: + return nil, fmt.Errorf("ethereum IPLDFetcher unexpected ipfs mode %s", ipfsMode.String()) + } + case shared.Bitcoin: + switch ipfsMode { + case shared.LocalInterface, shared.RemoteClient: + return btc.NewIPLDFetcher(ipfsPath) + case shared.DirectPostgres: + return btc.NewIPLDPGFetcher(db), nil + default: + return nil, fmt.Errorf("bitcoin IPLDFetcher unexpected ipfs mode %s", ipfsMode.String()) + } + default: + return nil, fmt.Errorf("invalid chain %s for IPLD fetcher constructor", chain.String()) + } +} + +// NewIPLDPublisher constructs an IPLDPublisher for the provided chain type +func NewIPLDPublisher(chain shared.ChainType, ipfsPath string, db *postgres.DB, ipfsMode shared.IPFSMode) (shared.IPLDPublisher, error) { + switch chain { + case shared.Ethereum: + switch ipfsMode { + case shared.LocalInterface, 
shared.RemoteClient: + return eth.NewIPLDPublisher(ipfsPath) + case shared.DirectPostgres: + return eth.NewIPLDPublisherAndIndexer(db), nil + default: + return nil, fmt.Errorf("ethereum IPLDPublisher unexpected ipfs mode %s", ipfsMode.String()) + } + case shared.Bitcoin: + switch ipfsMode { + case shared.LocalInterface, shared.RemoteClient: + return btc.NewIPLDPublisher(ipfsPath) + case shared.DirectPostgres: + return btc.NewIPLDPublisherAndIndexer(db), nil + default: + return nil, fmt.Errorf("bitcoin IPLDPublisher unexpected ipfs mode %s", ipfsMode.String()) + } + default: + return nil, fmt.Errorf("invalid chain %s for publisher constructor", chain.String()) + } +} + +// NewPublicAPI constructs a PublicAPI for the provided chain type +func NewPublicAPI(chain shared.ChainType, db *postgres.DB, ipfsPath string) (rpc.API, error) { + switch chain { + case shared.Ethereum: + backend, err := eth.NewEthBackend(db) + if err != nil { + return rpc.API{}, err + } + return rpc.API{ + Namespace: eth.APIName, + Version: eth.APIVersion, + Service: eth.NewPublicEthAPI(backend), + Public: true, + }, nil + default: + return rpc.API{}, fmt.Errorf("invalid chain %s for public api constructor", chain.String()) + } +} + +// NewCleaner constructs a Cleaner for the provided chain type +func NewCleaner(chain shared.ChainType, db *postgres.DB) (shared.Cleaner, error) { + switch chain { + case shared.Ethereum: + return eth.NewCleaner(db), nil + case shared.Bitcoin: + return btc.NewCleaner(db), nil + default: + return nil, fmt.Errorf("invalid chain %s for cleaner constructor", chain.String()) } } diff --git a/pkg/watcher/eth/converter.go b/pkg/watcher/eth/converter.go deleted file mode 100644 index f0fc99ce..00000000 --- a/pkg/watcher/eth/converter.go +++ /dev/null @@ -1,174 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the 
Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package eth - -import ( - "fmt" - "github.com/ethereum/go-ethereum/crypto" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/params" - "github.com/ethereum/go-ethereum/rlp" - - common2 "github.com/vulcanize/vulcanizedb/pkg/eth/converters/common" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" -) - -// WatcherConverter converts watched data into models for the trigger tables -type WatcherConverter struct { - chainConfig *params.ChainConfig -} - -// NewWatcherConverter creates a pointer to a new WatcherConverter -func NewWatcherConverter(chainConfig *params.ChainConfig) *WatcherConverter { - return &WatcherConverter{ - chainConfig: chainConfig, - } -} - -// Convert method is used to convert eth iplds to an cid payload -// Satisfies the shared.PayloadConverter interface -func (pc *WatcherConverter) Convert(ethIPLDs eth.IPLDs) (*eth.CIDPayload, error) { - numTxs := len(ethIPLDs.Transactions) - numRcts := len(ethIPLDs.Receipts) - if numTxs != numRcts { - return nil, fmt.Errorf("eth converter needs same numbe of receipts and transactions, have %d transactions and %d receipts", numTxs, numRcts) - } - // Initialize the payload struct and its fields - cids := new(eth.CIDPayload) - cids.UncleCIDs = make([]eth.UncleModel, len(ethIPLDs.Uncles)) - cids.TransactionCIDs = make([]eth.TxModel, numTxs) - cids.ReceiptCIDs = make(map[common.Hash]eth.ReceiptModel, 
numTxs) - cids.StateNodeCIDs = make([]eth.StateNodeModel, len(ethIPLDs.StateNodes)) - cids.StorageNodeCIDs = make(map[string][]eth.StorageNodeModel, len(ethIPLDs.StateNodes)) - - // Unpack header - var header types.Header - if err := rlp.DecodeBytes(ethIPLDs.Header.Data, &header); err != nil { - return nil, err - } - // Collect uncles so we can derive miner reward - uncles := make([]*types.Header, len(ethIPLDs.Uncles)) - for i, uncleIPLD := range ethIPLDs.Uncles { - var uncle types.Header - if err := rlp.DecodeBytes(uncleIPLD.Data, &uncle); err != nil { - return nil, err - } - uncleReward := common2.CalcUncleMinerReward(header.Number.Int64(), uncle.Number.Int64()) - uncles[i] = &uncle - // Uncle data - cids.UncleCIDs[i] = eth.UncleModel{ - CID: uncleIPLD.CID, - BlockHash: uncle.Hash().String(), - ParentHash: uncle.ParentHash.String(), - Reward: uncleReward.String(), - } - } - // Collect transactions so we can derive receipt fields and miner reward - signer := types.MakeSigner(pc.chainConfig, header.Number) - transactions := make(types.Transactions, len(ethIPLDs.Transactions)) - for i, txIPLD := range ethIPLDs.Transactions { - var tx types.Transaction - if err := rlp.DecodeBytes(txIPLD.Data, &tx); err != nil { - return nil, err - } - transactions[i] = &tx - from, err := types.Sender(signer, &tx) - if err != nil { - return nil, err - } - // Tx data - cids.TransactionCIDs[i] = eth.TxModel{ - Dst: shared.HandleNullAddrPointer(tx.To()), - Src: shared.HandleNullAddr(from), - TxHash: tx.Hash().String(), - Index: int64(i), - CID: txIPLD.CID, - } - } - // Collect receipts so that we can derive the rest of their fields and miner reward - receipts := make(types.Receipts, len(ethIPLDs.Receipts)) - for i, rctIPLD := range ethIPLDs.Receipts { - var rct types.Receipt - if err := rlp.DecodeBytes(rctIPLD.Data, &rct); err != nil { - return nil, err - } - receipts[i] = &rct - } - if err := receipts.DeriveFields(pc.chainConfig, header.Hash(), header.Number.Uint64(), transactions); err 
!= nil { - return nil, err - } - for i, receipt := range receipts { - matchedTx := transactions[i] - topicSets := make([][]string, 4) - mappedContracts := make(map[string]bool) // use map to avoid duplicate addresses - for _, log := range receipt.Logs { - for i, topic := range log.Topics { - topicSets[i] = append(topicSets[i], topic.Hex()) - } - mappedContracts[log.Address.String()] = true - } - logContracts := make([]string, 0, len(mappedContracts)) - for addr := range mappedContracts { - logContracts = append(logContracts, addr) - } - contract := shared.HandleNullAddr(receipt.ContractAddress) - var contractHash string - if contract != "" { - contractHash = crypto.Keccak256Hash(common.Hex2Bytes(contract)).String() - } - // Rct data - cids.ReceiptCIDs[matchedTx.Hash()] = eth.ReceiptModel{ - CID: ethIPLDs.Receipts[i].CID, - Topic0s: topicSets[0], - Topic1s: topicSets[1], - Topic2s: topicSets[2], - Topic3s: topicSets[3], - ContractHash: contractHash, - LogContracts: logContracts, - } - } - minerReward := common2.CalcEthBlockReward(&header, uncles, transactions, receipts) - // Header data - cids.HeaderCID = eth.HeaderModel{ - CID: ethIPLDs.Header.CID, - ParentHash: header.ParentHash.String(), - BlockHash: header.Hash().String(), - BlockNumber: header.Number.String(), - TotalDifficulty: ethIPLDs.TotalDifficulty.String(), - Reward: minerReward.String(), - } - // State data - for i, stateIPLD := range ethIPLDs.StateNodes { - cids.StateNodeCIDs[i] = eth.StateNodeModel{ - CID: stateIPLD.IPLD.CID, - NodeType: eth.ResolveFromNodeType(stateIPLD.Type), - StateKey: stateIPLD.StateLeafKey.String(), - } - } - // Storage data - for _, storageIPLD := range ethIPLDs.StorageNodes { - cids.StorageNodeCIDs[storageIPLD.StateLeafKey.Hex()] = append(cids.StorageNodeCIDs[storageIPLD.StateLeafKey.Hex()], eth.StorageNodeModel{ - CID: storageIPLD.IPLD.CID, - NodeType: eth.ResolveFromNodeType(storageIPLD.Type), - StorageKey: storageIPLD.StorageLeafKey.String(), - }) - } - return cids, nil -} 
diff --git a/pkg/watcher/eth/repository.go b/pkg/watcher/eth/repository.go deleted file mode 100644 index 1791697e..00000000 --- a/pkg/watcher/eth/repository.go +++ /dev/null @@ -1,190 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package eth - -import ( - "io/ioutil" - - "github.com/ethereum/go-ethereum/params" - "github.com/ethereum/go-ethereum/rlp" - "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "github.com/vulcanize/vulcanizedb/pkg/super_node" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth" - "github.com/vulcanize/vulcanizedb/pkg/watcher/shared" -) - -var ( - vacuumThreshold int64 = 5000 -) - -// Repository is the underlying struct for satisfying the shared.Repository interface for eth -type Repository struct { - cidIndexer *eth.CIDIndexer - converter *WatcherConverter - db *postgres.DB - triggerFunctions []string - deleteCalls int64 -} - -// NewRepository returns a new eth.Repository that satisfies the shared.Repository interface -func NewRepository(db *postgres.DB, triggerFunctions []string) shared.Repository { - return &Repository{ - cidIndexer: eth.NewCIDIndexer(db), - converter: NewWatcherConverter(params.MainnetChainConfig), - db: db, - triggerFunctions: triggerFunctions, - deleteCalls: 0, - } -} - -// LoadTriggers is used to initialize Postgres trigger function -// 
this needs to be called after the wasm functions these triggers invoke have been instantiated in Postgres -func (r *Repository) LoadTriggers() error { - // TODO: enable loading of triggers from IPFS - tx, err := r.db.Beginx() - if err != nil { - return err - } - for _, funcPath := range r.triggerFunctions { - sqlFile, err := ioutil.ReadFile(funcPath) - if err != nil { - return err - } - sqlString := string(sqlFile) - if _, err := tx.Exec(sqlString); err != nil { - return err - } - - } - return tx.Commit() -} - -// QueueData puts super node payload data into the db queue -func (r *Repository) QueueData(payload super_node.SubscriptionPayload) error { - pgStr := `INSERT INTO eth.queued_data (data, height) VALUES ($1, $2) - ON CONFLICT (height) DO UPDATE SET (data) VALUES ($1)` - _, err := r.db.Exec(pgStr, payload.Data, payload.Height) - return err -} - -// GetQueueData grabs payload data from the queue table so that it can be readied -// Used ensure we enter data into the tables that triggers act on in sequential order, even if we receive data out-of-order -// Returns the queued data, the new index, and err -// Deletes from the queue the data it retrieves -// Periodically vacuum's the table to free up space from the deleted rows -func (r *Repository) GetQueueData(height int64) (super_node.SubscriptionPayload, int64, error) { - pgStr := `DELETE FROM eth.queued_data - WHERE height = $1 - RETURNING *` - var res shared.QueuedData - if err := r.db.Get(&res, pgStr, height); err != nil { - return super_node.SubscriptionPayload{}, height, err - } - // If the delete get query succeeded, increment deleteCalls and height and prep payload to return - r.deleteCalls++ - height++ - payload := super_node.SubscriptionPayload{ - Data: res.Data, - Height: res.Height, - Flag: super_node.EmptyFlag, - } - // Periodically clean up space in the queued data table - if r.deleteCalls >= vacuumThreshold { - _, err := r.db.Exec(`VACUUM ANALYZE eth.queued_data`) - if err != nil { - 
logrus.Error(err) - } - r.deleteCalls = 0 - } - return payload, height, nil -} - -// ReadyData puts data in the tables ready for processing by trigger functions -func (r *Repository) ReadyData(payload super_node.SubscriptionPayload) error { - var ethIPLDs eth.IPLDs - if err := rlp.DecodeBytes(payload.Data, ðIPLDs); err != nil { - return err - } - if err := r.readyIPLDs(ethIPLDs); err != nil { - return err - } - cids, err := r.converter.Convert(ethIPLDs) - if err != nil { - return err - } - // Use indexer to persist all of the cid meta data - // trigger functions will act on these tables - return r.cidIndexer.Index(cids) -} - -// readyIPLDs adds IPLDs directly to the Postgres `blocks` table, rather than going through an IPFS node -func (r *Repository) readyIPLDs(ethIPLDs eth.IPLDs) error { - tx, err := r.db.Beginx() - if err != nil { - return err - } - pgStr := `INSERT INTO blocks (key, data) VALUES ($1, $2) - ON CONFLICT (key) DO UPDATE SET (data) = ($2)` - if _, err := tx.Exec(pgStr, ethIPLDs.Header.CID, ethIPLDs.Header.Data); err != nil { - if err := tx.Rollback(); err != nil { - logrus.Error(err) - } - return err - } - for _, uncle := range ethIPLDs.Uncles { - if _, err := tx.Exec(pgStr, uncle.CID, uncle.Data); err != nil { - if err := tx.Rollback(); err != nil { - logrus.Error(err) - } - return err - } - } - for _, trx := range ethIPLDs.Transactions { - if _, err := tx.Exec(pgStr, trx.CID, trx.Data); err != nil { - if err := tx.Rollback(); err != nil { - logrus.Error(err) - } - return err - } - } - for _, rct := range ethIPLDs.Receipts { - if _, err := tx.Exec(pgStr, rct.CID, rct.Data); err != nil { - if err := tx.Rollback(); err != nil { - logrus.Error(err) - } - return err - } - } - for _, state := range ethIPLDs.StateNodes { - if _, err := tx.Exec(pgStr, state.IPLD.CID, state.IPLD.Data); err != nil { - if err := tx.Rollback(); err != nil { - logrus.Error(err) - } - return err - } - } - for _, storage := range ethIPLDs.StorageNodes { - if _, err := 
tx.Exec(pgStr, storage.IPLD.CID, storage.IPLD.Data); err != nil { - if err := tx.Rollback(); err != nil { - logrus.Error(err) - } - return err - } - } - return nil -} diff --git a/pkg/watcher/example/sql/transfer_table.sql b/pkg/watcher/example/sql/transfer_table.sql deleted file mode 100644 index 895b6a2a..00000000 --- a/pkg/watcher/example/sql/transfer_table.sql +++ /dev/null @@ -1,10 +0,0 @@ -CREATE TABLE eth.token_transfers ( - id SERIAL PRIMARY KEY, - receipt_id INTEGER NOT NULL REFERENCES eth.receipt_cids (id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, - log_index INTEGER NOT NULL, - contract_address VARCHAR(66) NOT NULL, - src VARCHAR(66) NOT NULL, - dst VARCHAR(66) NOT NULL, - amount NUMERIC NOT NULL, - UNIQUE (receipt_id, log_index) -); \ No newline at end of file diff --git a/pkg/watcher/example/sql/transfer_trigger.sql b/pkg/watcher/example/sql/transfer_trigger.sql deleted file mode 100644 index bb3700b7..00000000 --- a/pkg/watcher/example/sql/transfer_trigger.sql +++ /dev/null @@ -1,7 +0,0 @@ -CREATE OR REPLACE FUNCTION transfer_trigger() RETURNS trigger AS -$BODY$ -BEGIN - SELECT * - -END; -$BODY$ \ No newline at end of file diff --git a/pkg/super_node/helpers.go b/pkg/watcher/helpers.go similarity index 98% rename from pkg/super_node/helpers.go rename to pkg/watcher/helpers.go index 4a4b077b..b6b9af81 100644 --- a/pkg/super_node/helpers.go +++ b/pkg/watcher/helpers.go @@ -14,7 +14,7 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see . 
-package super_node +package watcher import log "github.com/sirupsen/logrus" diff --git a/pkg/watcher/service.go b/pkg/watcher/service.go index 8081f6a9..2c251d90 100644 --- a/pkg/watcher/service.go +++ b/pkg/watcher/service.go @@ -17,204 +17,505 @@ package watcher import ( + "fmt" "sync" - "sync/atomic" - "time" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/crypto" + "github.com/ethereum/go-ethereum/node" + "github.com/ethereum/go-ethereum/p2p" "github.com/ethereum/go-ethereum/rlp" - "github.com/ethereum/go-ethereum/rpc" - "github.com/sirupsen/logrus" + log "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/super_node" - "github.com/vulcanize/vulcanizedb/pkg/wasm" - "github.com/vulcanize/vulcanizedb/pkg/watcher/shared" + "github.com/vulcanize/ipfs-chain-watcher/pkg/core" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) -// Watcher is the top level interface for watching data from super node -type Watcher interface { - Init() error - Watch(wg *sync.WaitGroup) error +const ( + PayloadChanBufferSize = 2000 +) + +// SuperNode is the top level interface for streaming, converting to IPLDs, publishing, +// and indexing all chain data; screening this data; and serving it up to subscribed clients +// This service is compatible with the Ethereum service interface (node.Service) +type SuperNode interface { + // APIs(), Protocols(), Start() and Stop() + node.Service + // Data processing event loop + Sync(wg *sync.WaitGroup, forwardPayloadChan chan<- shared.ConvertedData) error + // Pub-Sub handling event loop + Serve(wg *sync.WaitGroup, screenAndServePayload <-chan shared.ConvertedData) + // Method to subscribe to the service + Subscribe(id rpc.ID, sub chan<- SubscriptionPayload, quitChan chan<- bool, params shared.SubscriptionSettings) + // Method to unsubscribe from the service + Unsubscribe(id rpc.ID) + // Method to access the node info for the service + 
Node() *core.Node + // Method to access chain type + Chain() shared.ChainType } -// Service is the underlying struct for the SuperNodeWatcher +// Service is the underlying struct for the super node type Service struct { - // Config - WatcherConfig *Config - // Interface for streaming data from super node - SuperNodeStreamer shared.SuperNodeStreamer - // Interface for db operations - Repository shared.Repository - // WASM instantiator - WASMIniter *wasm.Instantiator - - // Channels for process communication/data relay - PayloadChan chan super_node.SubscriptionPayload - QuitChan chan bool - - // Indexes - payloadIndex *int64 - endingIndex int64 + // Used to sync access to the Subscriptions + sync.Mutex + // Interface for streaming payloads over an rpc subscription + Streamer shared.PayloadStreamer + // Interface for converting raw payloads into IPLD object payloads + Converter shared.PayloadConverter + // Interface for publishing the IPLD payloads to IPFS + Publisher shared.IPLDPublisher + // Interface for indexing the CIDs of the published IPLDs in Postgres + Indexer shared.CIDIndexer + // Interface for filtering and serving data according to subscribed clients according to their specification + Filterer shared.ResponseFilterer + // Interface for fetching IPLD objects from IPFS + IPLDFetcher shared.IPLDFetcher + // Interface for searching and retrieving CIDs from Postgres index + Retriever shared.CIDRetriever + // Chan the processor uses to subscribe to payloads from the Streamer + PayloadChan chan shared.RawChainData + // Used to signal shutdown of the service + QuitChan chan bool + // A mapping of rpc.IDs to their subscription channels, mapped to their subscription type (hash of the StreamFilters) + Subscriptions map[common.Hash]map[rpc.ID]Subscription + // A mapping of subscription params hash to the corresponding subscription params + SubscriptionTypes map[common.Hash]shared.SubscriptionSettings + // Info for the Geth node that this super node is working with + 
NodeInfo *core.Node + // Number of publishAndIndex workers + WorkerPoolSize int + // chain type for this service + chain shared.ChainType + // Path to ipfs data dir + ipfsPath string + // Underlying db + db *postgres.DB + // wg for syncing serve processes + serveWg *sync.WaitGroup } -// NewWatcher returns a new Service which satisfies the Watcher interface -func NewWatcher(c *Config, quitChan chan bool) (Watcher, error) { - repo, err := NewRepository(c.SubscriptionConfig.ChainType(), c.DB, c.TriggerFunctions) +// NewSuperNode creates a new super_node.Interface using an underlying super_node.Service struct +func NewSuperNode(settings *Config) (SuperNode, error) { + sn := new(Service) + var err error + // If we are syncing, initialize the needed interfaces + if settings.Sync { + sn.Streamer, sn.PayloadChan, err = NewPayloadStreamer(settings.Chain, settings.WSClient) + if err != nil { + return nil, err + } + sn.Converter, err = NewPayloadConverter(settings.Chain) + if err != nil { + return nil, err + } + sn.Publisher, err = NewIPLDPublisher(settings.Chain, settings.IPFSPath, settings.SyncDBConn, settings.IPFSMode) + if err != nil { + return nil, err + } + sn.Indexer, err = NewCIDIndexer(settings.Chain, settings.SyncDBConn, settings.IPFSMode) + if err != nil { + return nil, err + } + sn.Filterer, err = NewResponseFilterer(settings.Chain) + if err != nil { + return nil, err + } + } + // If we are serving, initialize the needed interfaces + if settings.Serve { + sn.Retriever, err = NewCIDRetriever(settings.Chain, settings.ServeDBConn) + if err != nil { + return nil, err + } + sn.IPLDFetcher, err = NewIPLDFetcher(settings.Chain, settings.IPFSPath, settings.ServeDBConn, settings.IPFSMode) + if err != nil { + return nil, err + } + sn.db = settings.ServeDBConn + } + sn.QuitChan = make(chan bool) + sn.Subscriptions = make(map[common.Hash]map[rpc.ID]Subscription) + sn.SubscriptionTypes = make(map[common.Hash]shared.SubscriptionSettings) + sn.WorkerPoolSize = settings.Workers + 
sn.NodeInfo = &settings.NodeInfo + sn.ipfsPath = settings.IPFSPath + sn.chain = settings.Chain + return sn, nil +} + +// Protocols exports the services p2p protocols, this service has none +func (sap *Service) Protocols() []p2p.Protocol { + return []p2p.Protocol{} +} + +// APIs returns the RPC descriptors the super node service offers +func (sap *Service) APIs() []rpc.API { + ifnoAPI := NewInfoAPI() + apis := []rpc.API{ + { + Namespace: APIName, + Version: APIVersion, + Service: NewPublicSuperNodeAPI(sap), + Public: true, + }, + { + Namespace: "rpc", + Version: APIVersion, + Service: ifnoAPI, + Public: true, + }, + { + Namespace: "net", + Version: APIVersion, + Service: ifnoAPI, + Public: true, + }, + { + Namespace: "admin", + Version: APIVersion, + Service: ifnoAPI, + Public: true, + }, + } + chainAPI, err := NewPublicAPI(sap.chain, sap.db, sap.ipfsPath) if err != nil { - return nil, err + log.Error(err) + return apis } - streamer, err := NewSuperNodeStreamer(c.Source, c.Client) - if err != nil { - return nil, err - } - return &Service{ - WatcherConfig: c, - SuperNodeStreamer: streamer, - Repository: repo, - WASMIniter: wasm.NewWASMInstantiator(c.DB, c.WASMFunctions), - PayloadChan: make(chan super_node.SubscriptionPayload, super_node.PayloadChanBufferSize), - QuitChan: quitChan, - }, nil + return append(apis, chainAPI) } -// Init is used to initialize the Postgres WASM and trigger functions -func (s *Service) Init() error { - // Instantiate the Postgres WASM functions - if err := s.WASMIniter.Instantiate(); err != nil { - return err - } - // Load the Postgres trigger functions that (can) use - return s.Repository.LoadTriggers() -} - -// Watch is the top level loop for watching -func (s *Service) Watch(wg *sync.WaitGroup) error { - rlpConfig, err := rlp.EncodeToBytes(s.WatcherConfig.SubscriptionConfig) +// Sync streams incoming raw chain data and converts it for further processing +// It forwards the converted data to the publishAndIndex process(es) it spins up 
+// If forwards the converted data to a ScreenAndServe process if it there is one listening on the passed screenAndServePayload channel +// This continues on no matter if or how many subscribers there are +func (sap *Service) Sync(wg *sync.WaitGroup, screenAndServePayload chan<- shared.ConvertedData) error { + sub, err := sap.Streamer.Stream(sap.PayloadChan) if err != nil { return err } - sub, err := s.SuperNodeStreamer.Stream(s.PayloadChan, rlpConfig) - if err != nil { - return err - } - atomic.StoreInt64(s.payloadIndex, s.WatcherConfig.SubscriptionConfig.StartingBlock().Int64()) - s.endingIndex = s.WatcherConfig.SubscriptionConfig.EndingBlock().Int64() // less than 0 => never end - backFillOnly := s.WatcherConfig.SubscriptionConfig.HistoricalDataOnly() - if backFillOnly { // we are only processing historical data => handle single contiguous stream - s.backFillOnlyQueuing(wg, sub) - } else { // otherwise we need to be prepared to handle out-of-order data - s.combinedQueuing(wg, sub) + // spin up publishAndIndex worker goroutines + publishAndIndexPayload := make(chan shared.ConvertedData, PayloadChanBufferSize) + for i := 1; i <= sap.WorkerPoolSize; i++ { + go sap.publishAndIndex(wg, i, publishAndIndexPayload) + log.Debugf("%s publishAndIndex worker %d successfully spun up", sap.chain.String(), i) } + go func() { + wg.Add(1) + defer wg.Done() + for { + select { + case payload := <-sap.PayloadChan: + ipldPayload, err := sap.Converter.Convert(payload) + if err != nil { + log.Errorf("super node conversion error for chain %s: %v", sap.chain.String(), err) + continue + } + log.Infof("%s data streamed at head height %d", sap.chain.String(), ipldPayload.Height()) + // If we have a ScreenAndServe process running, forward the iplds to it + select { + case screenAndServePayload <- ipldPayload: + default: + } + // Forward the payload to the publishAndIndex workers + // this channel acts as a ring buffer + select { + case publishAndIndexPayload <- ipldPayload: + default: + 
<-publishAndIndexPayload + publishAndIndexPayload <- ipldPayload + } + case err := <-sub.Err(): + log.Errorf("super node subscription error for chain %s: %v", sap.chain.String(), err) + case <-sap.QuitChan: + log.Infof("quiting %s Sync process", sap.chain.String()) + return + } + } + }() + log.Infof("%s Sync goroutine successfully spun up", sap.chain.String()) return nil } -// combinedQueuing assumes data is not necessarily going to come in linear order -// this is true when we are backfilling and streaming at the head or when we are -// only streaming at the head since reorgs can occur - -// NOTE: maybe we should push everything to the wait queue, otherwise the index could be shifted as we retrieve data from it -func (s *Service) combinedQueuing(wg *sync.WaitGroup, sub *rpc.ClientSubscription) { +// publishAndIndex is spun up by SyncAndConvert and receives converted chain data from that process +// it publishes this data to IPFS and indexes their CIDs with useful metadata in Postgres +func (sap *Service) publishAndIndex(wg *sync.WaitGroup, id int, publishAndIndexPayload <-chan shared.ConvertedData) { wg.Add(1) - // This goroutine is responsible for allocating incoming data to the ready or wait queue - // depending on if it is at the current index or not - forwardQuit := make(chan bool) + defer wg.Done() + for { + select { + case payload := <-publishAndIndexPayload: + log.Debugf("%s super node publishAndIndex worker %d publishing data streamed at head height %d", sap.chain.String(), id, payload.Height()) + cidPayload, err := sap.Publisher.Publish(payload) + if err != nil { + log.Errorf("%s super node publishAndIndex worker %d publishing error: %v", sap.chain.String(), id, err) + continue + } + log.Debugf("%s super node publishAndIndex worker %d indexing data streamed at head height %d", sap.chain.String(), id, payload.Height()) + if err := sap.Indexer.Index(cidPayload); err != nil { + log.Errorf("%s super node publishAndIndex worker %d indexing error: %v", 
sap.chain.String(), id, err) + } + case <-sap.QuitChan: + log.Infof("%s super node publishAndIndex worker %d shutting down", sap.chain.String(), id) + return + } + } +} + +// Serve listens for incoming converter data off the screenAndServePayload from the Sync process +// It filters and sends this data to any subscribers to the service +// This process can also be stood up alone, without an screenAndServePayload attached to a Sync process +// and it will hang on the WaitGroup indefinitely, allowing the Service to serve historical data requests only +func (sap *Service) Serve(wg *sync.WaitGroup, screenAndServePayload <-chan shared.ConvertedData) { + sap.serveWg = wg go func() { + wg.Add(1) + defer wg.Done() for { select { - case payload := <-s.PayloadChan: - // If there is an error associated with the payload, log it and continue - if payload.Error() != nil { - logrus.Error(payload.Error()) - continue - } - if payload.Height == atomic.LoadInt64(s.payloadIndex) { - // If the data is at our current index it is ready to be processed - // add it to the ready data queue and increment the index - if err := s.Repository.ReadyData(payload); err != nil { - logrus.Error(err) - } - // Increment the current index and if we have exceeded our ending height shut down the watcher - if atomic.AddInt64(s.payloadIndex, 1) > s.endingIndex { - logrus.Info("Watcher has reached ending block height, shutting down") - forwardQuit <- true - wg.Done() - return - } - } else { // Otherwise add it to the wait queue - if err := s.Repository.QueueData(payload); err != nil { - logrus.Error(err) - } - } - case err := <-sub.Err(): - logrus.Error(err) - case <-s.QuitChan: - logrus.Info("Watcher shutting down") - forwardQuit <- true - wg.Done() + case payload := <-screenAndServePayload: + sap.filterAndServe(payload) + case <-sap.QuitChan: + log.Infof("quiting %s Serve process", sap.chain.String()) return } } }() - ticker := time.NewTicker(5 * time.Second) - // This goroutine is responsible for moving 
data from the wait queue to the ready queue - // preserving the correct order and alignment with the current index - go func() { - for { + log.Infof("%s Serve goroutine successfully spun up", sap.chain.String()) +} + +// filterAndServe filters the payload according to each subscription type and sends to the subscriptions +func (sap *Service) filterAndServe(payload shared.ConvertedData) { + log.Debugf("sending %s payload to subscriptions", sap.chain.String()) + sap.Lock() + sap.serveWg.Add(1) + defer sap.Unlock() + defer sap.serveWg.Done() + for ty, subs := range sap.Subscriptions { + // Retrieve the subscription parameters for this subscription type + subConfig, ok := sap.SubscriptionTypes[ty] + if !ok { + log.Errorf("super node %s subscription configuration for subscription type %s not available", sap.chain.String(), ty.Hex()) + sap.closeType(ty) + continue + } + if subConfig.EndingBlock().Int64() > 0 && subConfig.EndingBlock().Int64() < payload.Height() { + // We are not out of range for this subscription type + // close it, and continue to the next + sap.closeType(ty) + continue + } + response, err := sap.Filterer.Filter(subConfig, payload) + if err != nil { + log.Errorf("super node filtering error for chain %s: %v", sap.chain.String(), err) + sap.closeType(ty) + continue + } + responseRLP, err := rlp.EncodeToBytes(response) + if err != nil { + log.Errorf("super node rlp encoding error for chain %s: %v", sap.chain.String(), err) + continue + } + for id, sub := range subs { select { - case <-ticker.C: - // Retrieve queued data, in order, and forward it to the ready queue - queueData, newIndex, err := s.Repository.GetQueueData(atomic.LoadInt64(s.payloadIndex)) - if err != nil { - logrus.Error(err) - continue - } - atomic.StoreInt64(s.payloadIndex, newIndex) - if atomic.LoadInt64(s.payloadIndex) > s.endingIndex { - s.QuitChan <- true - } - if err := s.Repository.ReadyData(queueData); err != nil { - logrus.Error(err) - } - case <-forwardQuit: + case sub.PayloadChan 
<- SubscriptionPayload{Data: responseRLP, Err: "", Flag: EmptyFlag, Height: response.Height()}: + log.Debugf("sending super node %s payload to subscription %s", sap.chain.String(), id) + default: + log.Infof("unable to send %s payload to subscription %s; channel has no receiver", sap.chain.String(), id) + } + } + } +} + +// Subscribe is used by the API to remotely subscribe to the service loop +// The params must be rlp serializable and satisfy the SubscriptionSettings() interface +func (sap *Service) Subscribe(id rpc.ID, sub chan<- SubscriptionPayload, quitChan chan<- bool, params shared.SubscriptionSettings) { + sap.serveWg.Add(1) + defer sap.serveWg.Done() + log.Infof("New %s subscription %s", sap.chain.String(), id) + subscription := Subscription{ + ID: id, + PayloadChan: sub, + QuitChan: quitChan, + } + if params.ChainType() != sap.chain { + sendNonBlockingErr(subscription, fmt.Errorf("subscription %s is for chain %s, service supports chain %s", id, params.ChainType().String(), sap.chain.String())) + sendNonBlockingQuit(subscription) + return + } + // Subscription type is defined as the hash of the rlp-serialized subscription settings + by, err := rlp.EncodeToBytes(params) + if err != nil { + sendNonBlockingErr(subscription, err) + sendNonBlockingQuit(subscription) + return + } + subscriptionType := crypto.Keccak256Hash(by) + if !params.HistoricalDataOnly() { + // Add subscriber + sap.Lock() + if sap.Subscriptions[subscriptionType] == nil { + sap.Subscriptions[subscriptionType] = make(map[rpc.ID]Subscription) + } + sap.Subscriptions[subscriptionType][id] = subscription + sap.SubscriptionTypes[subscriptionType] = params + sap.Unlock() + } + // If the subscription requests a backfill, use the Postgres index to lookup and retrieve historical data + // Otherwise we only filter new data as it is streamed in from the state diffing geth node + if params.HistoricalData() || params.HistoricalDataOnly() { + if err := sap.sendHistoricalData(subscription, id, params); err 
!= nil { + sendNonBlockingErr(subscription, fmt.Errorf("%s super node subscriber backfill error: %v", sap.chain.String(), err)) + sendNonBlockingQuit(subscription) + return + } + } +} + +// sendHistoricalData sends historical data to the requesting subscription +func (sap *Service) sendHistoricalData(sub Subscription, id rpc.ID, params shared.SubscriptionSettings) error { + log.Infof("Sending %s historical data to subscription %s", sap.chain.String(), id) + // Retrieve cached CIDs relevant to this subscriber + var endingBlock int64 + var startingBlock int64 + var err error + startingBlock, err = sap.Retriever.RetrieveFirstBlockNumber() + if err != nil { + return err + } + if startingBlock < params.StartingBlock().Int64() { + startingBlock = params.StartingBlock().Int64() + } + endingBlock, err = sap.Retriever.RetrieveLastBlockNumber() + if err != nil { + return err + } + if endingBlock > params.EndingBlock().Int64() && params.EndingBlock().Int64() > 0 && params.EndingBlock().Int64() > startingBlock { + endingBlock = params.EndingBlock().Int64() + } + log.Debugf("%s historical data starting block: %d", sap.chain.String(), params.StartingBlock().Int64()) + log.Debugf("%s historical data ending block: %d", sap.chain.String(), endingBlock) + go func() { + sap.serveWg.Add(1) + defer sap.serveWg.Done() + for i := startingBlock; i <= endingBlock; i++ { + select { + case <-sap.QuitChan: + log.Infof("%s super node historical data feed to subscription %s closed", sap.chain.String(), id) return default: - // Do nothing, wait til next tick } - } - }() -} - -// backFillOnlyQueuing assumes the data is coming in contiguously from behind the head -// it puts all data directly into the ready queue -// it continues until the watcher is told to quit or we receive notification that the backfill is finished -func (s *Service) backFillOnlyQueuing(wg *sync.WaitGroup, sub *rpc.ClientSubscription) { - wg.Add(1) - go func() { - for { - select { - case payload := <-s.PayloadChan: - // If 
there is an error associated with the payload, log it and continue - if payload.Error() != nil { - logrus.Error(payload.Error()) + cidWrappers, empty, err := sap.Retriever.Retrieve(params, i) + if err != nil { + sendNonBlockingErr(sub, fmt.Errorf(" %s super node CID Retrieval error at block %d\r%s", sap.chain.String(), i, err.Error())) + continue + } + if empty { + continue + } + for _, cids := range cidWrappers { + response, err := sap.IPLDFetcher.Fetch(cids) + if err != nil { + sendNonBlockingErr(sub, fmt.Errorf("%s super node IPLD Fetching error at block %d\r%s", sap.chain.String(), i, err.Error())) continue } - // If the payload signals that backfilling has completed, shut down the process - if payload.BackFillComplete() { - logrus.Info("Backfill complete, WatchContract shutting down") - wg.Done() - return + responseRLP, err := rlp.EncodeToBytes(response) + if err != nil { + log.Error(err) + continue } - // Add the payload the ready data queue - if err := s.Repository.ReadyData(payload); err != nil { - logrus.Error(err) + select { + case sub.PayloadChan <- SubscriptionPayload{Data: responseRLP, Err: "", Flag: EmptyFlag, Height: response.Height()}: + log.Debugf("sending super node historical data payload to %s subscription %s", sap.chain.String(), id) + default: + log.Infof("unable to send backFill payload to %s subscription %s; channel has no receiver", sap.chain.String(), id) } - case err := <-sub.Err(): - logrus.Error(err) - case <-s.QuitChan: - logrus.Info("Watcher shutting down") - wg.Done() - return } } + // when we are done backfilling send an empty payload signifying so in the msg + select { + case sub.PayloadChan <- SubscriptionPayload{Data: nil, Err: "", Flag: BackFillCompleteFlag}: + log.Debugf("sending backFill completion notice to %s subscription %s", sap.chain.String(), id) + default: + log.Infof("unable to send backFill completion notice to %s subscription %s", sap.chain.String(), id) + } }() + return nil +} + +// Unsubscribe is used by the API to 
remotely unsubscribe to the StateDiffingService loop +func (sap *Service) Unsubscribe(id rpc.ID) { + log.Infof("Unsubscribing %s from the %s super node service", id, sap.chain.String()) + sap.Lock() + for ty := range sap.Subscriptions { + delete(sap.Subscriptions[ty], id) + if len(sap.Subscriptions[ty]) == 0 { + // If we removed the last subscription of this type, remove the subscription type outright + delete(sap.Subscriptions, ty) + delete(sap.SubscriptionTypes, ty) + } + } + sap.Unlock() +} + +// Start is used to begin the service +// This is mostly just to satisfy the node.Service interface +func (sap *Service) Start(*p2p.Server) error { + log.Infof("Starting %s super node service", sap.chain.String()) + wg := new(sync.WaitGroup) + payloadChan := make(chan shared.ConvertedData, PayloadChanBufferSize) + if err := sap.Sync(wg, payloadChan); err != nil { + return err + } + sap.Serve(wg, payloadChan) + return nil +} + +// Stop is used to close down the service +// This is mostly just to satisfy the node.Service interface +func (sap *Service) Stop() error { + log.Infof("Stopping %s super node service", sap.chain.String()) + sap.Lock() + close(sap.QuitChan) + sap.close() + sap.Unlock() + return nil +} + +// Node returns the node info for this service +func (sap *Service) Node() *core.Node { + return sap.NodeInfo +} + +// Chain returns the chain type for this service +func (sap *Service) Chain() shared.ChainType { + return sap.chain +} + +// close is used to close all listening subscriptions +// close needs to be called with subscription access locked +func (sap *Service) close() { + log.Infof("Closing all %s subscriptions", sap.chain.String()) + for subType, subs := range sap.Subscriptions { + for _, sub := range subs { + sendNonBlockingQuit(sub) + } + delete(sap.Subscriptions, subType) + delete(sap.SubscriptionTypes, subType) + } +} + +// closeType is used to close all subscriptions of given type +// closeType needs to be called with subscription access locked +func 
(sap *Service) closeType(subType common.Hash) { + log.Infof("Closing all %s subscriptions of type %s", sap.chain.String(), subType.String()) + subs := sap.Subscriptions[subType] + for _, sub := range subs { + sendNonBlockingQuit(sub) + } + delete(sap.Subscriptions, subType) + delete(sap.SubscriptionTypes, subType) } diff --git a/pkg/super_node/service_test.go b/pkg/watcher/service_test.go similarity index 88% rename from pkg/super_node/service_test.go rename to pkg/watcher/service_test.go index bc32849a..c084732b 100644 --- a/pkg/super_node/service_test.go +++ b/pkg/watcher/service_test.go @@ -14,7 +14,7 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see . -package super_node_test +package watcher_test import ( "sync" @@ -24,10 +24,10 @@ import ( . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/super_node" - "github.com/vulcanize/vulcanizedb/pkg/super_node/eth/mocks" - "github.com/vulcanize/vulcanizedb/pkg/super_node/shared" - mocks2 "github.com/vulcanize/vulcanizedb/pkg/super_node/shared/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/eth/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" + mocks2 "github.com/vulcanize/ipfs-chain-watcher/pkg/shared/mocks" + "github.com/vulcanize/ipfs-chain-watcher/pkg/watcher" ) var _ = Describe("Service", func() { @@ -54,7 +54,7 @@ var _ = Describe("Service", func() { ReturnIPLDPayload: mocks.MockConvertedPayload, ReturnErr: nil, } - processor := &super_node.Service{ + processor := &watcher.Service{ Indexer: mockCidIndexer, Publisher: mockPublisher, Streamer: mockStreamer, diff --git a/pkg/watcher/shared/interfaces.go b/pkg/watcher/shared/interfaces.go deleted file mode 100644 index 5e6c64ee..00000000 --- a/pkg/watcher/shared/interfaces.go +++ /dev/null @@ -1,36 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under 
the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package shared - -import ( - "github.com/ethereum/go-ethereum/rpc" - - "github.com/vulcanize/vulcanizedb/pkg/super_node" -) - -// Repository is the interface for the Postgres database -type Repository interface { - LoadTriggers() error - QueueData(payload super_node.SubscriptionPayload) error - GetQueueData(height int64) (super_node.SubscriptionPayload, int64, error) - ReadyData(payload super_node.SubscriptionPayload) error -} - -// SuperNodeStreamer is the interface for streaming data from a vulcanizeDB super node -type SuperNodeStreamer interface { - Stream(payloadChan chan super_node.SubscriptionPayload, rlpParams []byte) (*rpc.ClientSubscription, error) -} diff --git a/pkg/watcher/shared/models.go b/pkg/watcher/shared/models.go deleted file mode 100644 index e905a7e2..00000000 --- a/pkg/watcher/shared/models.go +++ /dev/null @@ -1,24 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. 
- -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . - -package shared - -// QueuedData is the db model for queued data -type QueuedData struct { - ID int64 `db:"id"` - Data []byte `db:"data"` - Height int64 `db:"height"` -} diff --git a/pkg/watcher/shared/source_type.go b/pkg/watcher/shared/source_type.go deleted file mode 100644 index 7a1ce730..00000000 --- a/pkg/watcher/shared/source_type.go +++ /dev/null @@ -1,58 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package shared - -import ( - "errors" - "strings" -) - -// SourceType enum for specifying source type for raw chain data -type SourceType int - -const ( - Unknown SourceType = iota - VulcanizeDB - Ethereum - Bitcoin -) - -func (c SourceType) String() string { - switch c { - case Ethereum: - return "Ethereum" - case Bitcoin: - return "Bitcoin" - case VulcanizeDB: - return "VulcanizeDB" - default: - return "" - } -} - -func NewSourceType(name string) (SourceType, error) { - switch strings.ToLower(name) { - case "ethereum", "eth": - return Ethereum, nil - case "bitcoin", "btc", "xbt": - return Bitcoin, nil - case "vulcanizedb", "vdb": - return VulcanizeDB, nil - default: - return Unknown, errors.New("invalid name for data source") - } -} diff --git a/pkg/super_node/subscription.go b/pkg/watcher/subscription.go similarity index 98% rename from pkg/super_node/subscription.go rename to pkg/watcher/subscription.go index a1cdb045..029b6fd4 100644 --- a/pkg/super_node/subscription.go +++ b/pkg/watcher/subscription.go @@ -14,7 +14,7 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see . -package super_node +package watcher import ( "errors" diff --git a/pkg/super_node/super_node_suite_test.go b/pkg/watcher/super_node_suite_test.go similarity index 90% rename from pkg/super_node/super_node_suite_test.go rename to pkg/watcher/super_node_suite_test.go index 12321813..f09135b9 100644 --- a/pkg/super_node/super_node_suite_test.go +++ b/pkg/watcher/super_node_suite_test.go @@ -14,7 +14,7 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see . 
-package super_node_test +package watcher_test import ( "io/ioutil" @@ -25,9 +25,9 @@ import ( "github.com/sirupsen/logrus" ) -func TestSuperNode(t *testing.T) { +func TestIPFSWatcher(t *testing.T) { RegisterFailHandler(Fail) - RunSpecs(t, "Super Node Suite Test") + RunSpecs(t, "IPFS Watcher Suite Test") } var _ = BeforeSuite(func() { diff --git a/plugins/README.md b/plugins/README.md deleted file mode 100644 index a17ed3f1..00000000 --- a/plugins/README.md +++ /dev/null @@ -1,2 +0,0 @@ -## Plugins -This empty directory is for Exporter plugins (.go and .so files) written, built, and linked to by the composeAndExecute command diff --git a/test_config/test_config.go b/test_config/test_config.go index 96969589..e3ac847f 100644 --- a/test_config/test_config.go +++ b/test_config/test_config.go @@ -19,12 +19,13 @@ package test_config import ( "errors" "fmt" + "os" + "github.com/sirupsen/logrus" "github.com/spf13/viper" - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" - "os" + "github.com/vulcanize/ipfs-chain-watcher/pkg/config" + "github.com/vulcanize/ipfs-chain-watcher/pkg/core" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" ) var TestConfig *viper.Viper @@ -40,7 +41,7 @@ func init() { func setTestConfig() { TestConfig = viper.New() TestConfig.SetConfigName("testing") - TestConfig.AddConfigPath("$GOPATH/src/github.com/vulcanize/vulcanizedb/environments/") + TestConfig.AddConfigPath("$GOPATH/src/github.com/vulcanize/ipfs-chain-watcher/environments/") err := TestConfig.ReadInConfig() if err != nil { logrus.Fatal(err) @@ -72,7 +73,7 @@ func setTestConfig() { func setABIPath() { gp := os.Getenv("GOPATH") - ABIFilePath = gp + "/src/github.com/vulcanize/vulcanizedb/pkg/eth/testing/" + ABIFilePath = gp + "/src/github.com/vulcanize/ipfs-chain-watcher/pkg/eth/testing/" } func NewTestDB(node core.Node) *postgres.DB { @@ -84,9 +85,6 @@ func NewTestDB(node core.Node) 
*postgres.DB { } func CleanTestDB(db *postgres.DB) { - db.MustExec("DELETE FROM addresses") - db.MustExec("DELETE FROM blocks") - db.MustExec("DELETE FROM checked_headers") // can't delete from nodes since this function is called after the required node is persisted db.MustExec("DELETE FROM goose_db_version") db.MustExec("DELETE FROM header_sync_logs") diff --git a/libraries/shared/utilities/utilities_suite_test.go b/utils/utilities_suite_test.go similarity index 97% rename from libraries/shared/utilities/utilities_suite_test.go rename to utils/utilities_suite_test.go index cfcc2707..5095a048 100644 --- a/libraries/shared/utilities/utilities_suite_test.go +++ b/utils/utilities_suite_test.go @@ -14,7 +14,7 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see . -package utilities_test +package utils_test import ( "io/ioutil" diff --git a/utils/utils.go b/utils/utils.go index 2e852790..a970c89e 100644 --- a/utils/utils.go +++ b/utils/utils.go @@ -17,17 +17,14 @@ package utils import ( - "math/big" - "os" - "path/filepath" + "errors" - "github.com/jmoiron/sqlx" "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/eth" - "github.com/vulcanize/vulcanizedb/pkg/eth/core" - "github.com/vulcanize/vulcanizedb/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/config" + "github.com/vulcanize/ipfs-chain-watcher/pkg/core" + "github.com/vulcanize/ipfs-chain-watcher/pkg/postgres" + "github.com/vulcanize/ipfs-chain-watcher/pkg/shared" ) func LoadPostgres(database config.Database, node core.Node) postgres.DB { @@ -38,54 +35,56 @@ func LoadPostgres(database config.Database, node core.Node) postgres.DB { return *db } -func ReadAbiFile(abiFilepath string) string { - abiFilepath = AbsFilePath(abiFilepath) - abi, err := eth.ReadAbiFile(abiFilepath) - if err != nil { - logrus.Fatalf("Error reading ABI file at \"%s\"\n %v", abiFilepath, err) +// 
GetBlockHeightBins splits a block range up into bins of block heights of the given batch size +func GetBlockHeightBins(startingBlock, endingBlock, batchSize uint64) ([][]uint64, error) { + if endingBlock < startingBlock { + return nil, errors.New("backfill: ending block number needs to be greater than starting block number") } - return abi + if batchSize == 0 { + return nil, errors.New("backfill: batchsize needs to be greater than zero") + } + length := endingBlock - startingBlock + 1 + numberOfBins := length / batchSize + remainder := length % batchSize + if remainder != 0 { + numberOfBins++ + } + blockRangeBins := make([][]uint64, numberOfBins) + for i := range blockRangeBins { + nextBinStart := startingBlock + batchSize + blockRange := make([]uint64, 0, nextBinStart-startingBlock+1) + for j := startingBlock; j < nextBinStart && j <= endingBlock; j++ { + blockRange = append(blockRange, j) + } + startingBlock = nextBinStart + blockRangeBins[i] = blockRange + } + return blockRangeBins, nil } -func AbsFilePath(filePath string) string { - if !filepath.IsAbs(filePath) { - cwd, _ := os.Getwd() - filePath = filepath.Join(cwd, filePath) - } - return filePath -} - -func GetAbi(abiFilepath string, contractHash string, network string) string { - var contractAbiString string - if abiFilepath != "" { - contractAbiString = ReadAbiFile(abiFilepath) - } else { - url := eth.GenURL(network) - etherscan := eth.NewEtherScanClient(url) - logrus.Printf("No ABI supplied. 
Retrieving ABI from Etherscan: %s", url) - contractAbiString, _ = etherscan.GetAbi(contractHash) - } - _, err := eth.ParseAbi(contractAbiString) - if err != nil { - logrus.Fatalln("Invalid ABI: ", err) - } - return contractAbiString -} - -func RequestedBlockNumber(blockNumber *int64) *big.Int { - var _blockNumber *big.Int - if *blockNumber == -1 { - _blockNumber = nil - } else { - _blockNumber = big.NewInt(*blockNumber) - } - return _blockNumber -} - -func RollbackAndLogFailure(tx *sqlx.Tx, txErr error, fieldName string) { - rollbackErr := tx.Rollback() - if rollbackErr != nil { - logrus.WithFields(logrus.Fields{"rollbackErr": rollbackErr, "txErr": txErr}). - Warnf("failed to rollback transaction after failing to insert %s", fieldName) +// MissingHeightsToGaps returns a slice of gaps from a slice of missing block heights +func MissingHeightsToGaps(heights []uint64) []shared.Gap { + if len(heights) == 0 { + return nil } + validationGaps := make([]shared.Gap, 0) + start := heights[0] + lastHeight := start + for i, height := range heights[1:] { + if height != lastHeight+1 { + validationGaps = append(validationGaps, shared.Gap{ + Start: start, + Stop: lastHeight, + }) + start = height + } + if i+2 == len(heights) { + validationGaps = append(validationGaps, shared.Gap{ + Start: start, + Stop: height, + }) + } + lastHeight = height + } + return validationGaps } diff --git a/libraries/shared/utilities/utils_test.go b/utils/utils_test.go similarity index 96% rename from libraries/shared/utilities/utils_test.go rename to utils/utils_test.go index 8a3050ef..78dc965e 100644 --- a/libraries/shared/utilities/utils_test.go +++ b/utils/utils_test.go @@ -14,13 +14,13 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see . -package utilities_test +package utils_test import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - utils "github.com/vulcanize/vulcanizedb/libraries/shared/utilities" + "github.com/vulcanize/ipfs-chain-watcher/utils" ) var _ = Describe("GetBlockHeightBins", func() {