diff --git a/documentation/contributing.md b/documentation/contributing.md
index 369cbd95..a15f0b8a 100644
--- a/documentation/contributing.md
+++ b/documentation/contributing.md
@@ -12,7 +12,7 @@ can be run together with other custom transformers using the [composeAndExeucte]
 - Update the README or any [documentation files](./) as necessary. If editing the Readme, please conform to the
 [standard-readme specification](https://github.com/RichardLitt/standard-readme).
-- You may merge a Pull Request once you have an approval from core developer.
+- Once a Pull Request has received two approvals it can be merged in by a core developer.
 
 ## Creating a new migration file
 1. `make new_migration NAME=add_columnA_to_table1`
diff --git a/documentation/custom-transformers.md b/documentation/custom-transformers.md
index 41d0a449..a29aabd4 100644
--- a/documentation/custom-transformers.md
+++ b/documentation/custom-transformers.md
@@ -46,10 +46,10 @@ To update a plugin repository with changes to the core vulcanizedb repository, r
 * The `compose`, `execute`, `composeAndExecute` commands require Go 1.11+ and use [Go plugins](https://golang
 .org/pkg/plugin/) which only work on Unix-based systems.
-* There is an ongoing [conflict](https://github.com/golang/go/issues/20481) between Go plugins and the use vendored
+* There is an ongoing [conflict](https://github.com/golang/go/issues/20481) between Go plugins and the use of vendored
 dependencies which imposes certain limitations on how the plugins are built.
-* Separate `compose` and `execute` commands allow pre-building and linking to a pre-built .so file. So, if
+* Separate `compose` and `execute` commands allow pre-building and linking to the pre-built .so file. So, if
 these are run independently, instead of using `composeAndExecute`, a couple of things need to be considered:
   * It is necessary that the .so file was built with the same exact dependencies that are present in the execution
   environment, i.e. we need to `compose` and `execute` the plugin .so file with the same exact version of vulcanizeDB.
@@ -61,19 +61,19 @@ these are run independently, instead of using `composeAndExecute`, a couple of t
 `$GOPATH`, and that all of the transformer repositories for building the plugin are present at their `$GOPATH` directories.
 * The `execute` command does not require the plugin transformer dependencies be located in their `$GOPATH` directories,
-instead it expects a prebuilt .so file (of the name specified in the config file) to be in
+instead it expects a .so file (of the name specified in the config file) to be in
 `$GOPATH/src/github.com/vulcanize/vulcanizedb/plugins/` and, as noted above, also expects the plugin db migrations to
 have already been ran against the database.
 
 * Usage:
-  * compose: `./vulcanizedb compose --config=./environments/config_name.toml`
+  * compose: `./vulcanizedb compose --config=environments/config_name.toml`
-  * execute: `./vulcanizedb execute --config=./environments/config_name.toml`
+  * execute: `./vulcanizedb execute --config=environments/config_name.toml`
-  * composeAndExecute: `./vulcanizedb composeAndExecute --config=./environments/config_name.toml`
+  * composeAndExecute: `./vulcanizedb composeAndExecute --config=environments/config_name.toml`
 
 ### Flags
 
-The `compose` and `composeAndExecute` commands can be passed optional flags to specify the operation of the watchers:
+The `execute` and `composeAndExecute` commands can be passed optional flags to specify the operation of the watchers:
 
 - `--recheck-headers`/`-r` - specifies whether to re-check headers for events after the header has already been queried
 for watched logs. Can be useful for redundancy if you suspect that your node is not always returning all desired logs
 on every query.
diff --git a/pkg/history/populate_headers.go b/pkg/history/populate_headers.go
index c75d4213..dd8028ce 100644
--- a/pkg/history/populate_headers.go
+++ b/pkg/history/populate_headers.go
@@ -24,14 +24,14 @@ import (
 	"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories"
 )
 
-func PopulateMissingHeaders(blockchain core.BlockChain, headerRepository datastore.HeaderRepository, startingBlockNumber int64) (int, error) {
-	lastBlock, err := blockchain.LastBlock()
+func PopulateMissingHeaders(blockChain core.BlockChain, headerRepository datastore.HeaderRepository, startingBlockNumber int64) (int, error) {
+	lastBlock, err := blockChain.LastBlock()
 	if err != nil {
 		log.Error("PopulateMissingHeaders: Error getting last block: ", err)
 		return 0, err
 	}
 
-	blockNumbers, err := headerRepository.MissingBlockNumbers(startingBlockNumber, lastBlock.Int64(), blockchain.Node().ID)
+	blockNumbers, err := headerRepository.MissingBlockNumbers(startingBlockNumber, lastBlock.Int64(), blockChain.Node().ID)
 	if err != nil {
 		log.Error("PopulateMissingHeaders: Error getting missing block numbers: ", err)
 		return 0, err
@@ -40,7 +40,7 @@ func PopulateMissingHeaders(blockchain core.BlockChain, headerRepository datasto
 	}
 
 	log.Printf("Backfilling %d blocks\n\n", len(blockNumbers))
-	_, err = RetrieveAndUpdateHeaders(blockchain, headerRepository, blockNumbers)
+	_, err = RetrieveAndUpdateHeaders(blockChain, headerRepository, blockNumbers)
 	if err != nil {
 		log.Error("PopulateMissingHeaders: Error getting/updating headers:", err)
 		return 0, err
@@ -48,8 +48,8 @@ func PopulateMissingHeaders(blockchain core.BlockChain, headerRepository datasto
 	return len(blockNumbers), nil
 }
 
-func RetrieveAndUpdateHeaders(blockchain core.BlockChain, headerRepository datastore.HeaderRepository, blockNumbers []int64) (int, error) {
-	headers, err := blockchain.GetHeadersByNumbers(blockNumbers)
+func RetrieveAndUpdateHeaders(blockChain core.BlockChain, headerRepository datastore.HeaderRepository, blockNumbers []int64) (int, error) {
+	headers, err := blockChain.GetHeadersByNumbers(blockNumbers)
 	for _, header := range headers {
 		_, err = headerRepository.CreateOrUpdateHeader(header)
 		if err != nil {