Merge pull request #120 from vulcanize/logging-enhancement

Logging enhancement
commit a3b55bfd56 by Andrew J Yao, 2019-07-23 13:22:58 -07:00, committed by GitHub
10 changed files with 86 additions and 67 deletions
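
The pattern this PR applies across the cmd package: each cobra subcommand's Run hook records cmd.CalledAs() in a shared SubCommand string and derives a LogWithCommand entry via logrus's WithField, so every subsequent log line is tagged with the subcommand that emitted it. A minimal, self-contained sketch of that pattern follows; demoCmd, run, and doWork are illustrative stand-ins, not code from this diff.

```go
package main

import (
	log "github.com/sirupsen/logrus"
	"github.com/spf13/cobra"
)

// Shared by all subcommands, mirroring the variables this PR adds to the root command file.
var (
	SubCommand     string
	LogWithCommand log.Entry
)

// demoCmd stands in for any of the real subcommands (coldImport, compose, execute, ...).
var demoCmd = &cobra.Command{
	Use: "demo",
	Run: func(cmd *cobra.Command, args []string) {
		SubCommand = cmd.CalledAs()
		// WithField returns *log.Entry; the diff dereferences it to store a value.
		LogWithCommand = *log.WithField("SubCommand", SubCommand)
		run()
	},
}

func run() {
	// Every line now carries SubCommand=demo alongside the message and level.
	LogWithCommand.Info("starting work")
	if err := doWork(); err != nil {
		LogWithCommand.Fatal("work failed: ", err)
	}
}

func doWork() error { return nil }

func main() {
	rootCmd := &cobra.Command{Use: "vulcanizedb-demo"}
	rootCmd.AddCommand(demoCmd)
	if err := rootCmd.Execute(); err != nil {
		log.Fatal(err)
	}
}
```

With the JSON formatter configured in main.go, each entry then carries a "SubCommand" key next to "msg" and "level", which is what makes log lines attributable to a specific command.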


@ -39,6 +39,8 @@ var coldImportCmd = &cobra.Command{
Geth must be synced over all of the desired blocks and must not be running in order to execute this command.`,
Run: func(cmd *cobra.Command, args []string) {
SubCommand = cmd.CalledAs()
LogWithCommand = *log.WithField("SubCommand", SubCommand)
coldImport()
},
}
@ -55,7 +57,7 @@ func coldImport() {
ethDBConfig := ethereum.CreateDatabaseConfig(ethereum.Level, levelDbPath)
ethDB, err := ethereum.CreateDatabase(ethDBConfig)
if err != nil {
log.Fatal("Error connecting to ethereum db: ", err)
LogWithCommand.Fatal("Error connecting to ethereum db: ", err)
}
mostRecentBlockNumberInDb := ethDB.GetHeadBlockNumber()
if syncAll {
@ -63,10 +65,10 @@ func coldImport() {
endingBlockNumber = mostRecentBlockNumberInDb
}
if endingBlockNumber < startingBlockNumber {
log.Fatal("Ending block number must be greater than starting block number for cold import.")
LogWithCommand.Fatal("Ending block number must be greater than starting block number for cold import.")
}
if endingBlockNumber > mostRecentBlockNumberInDb {
log.Fatal("Ending block number is greater than most recent block in db: ", mostRecentBlockNumberInDb)
LogWithCommand.Fatal("Ending block number is greater than most recent block in db: ", mostRecentBlockNumberInDb)
}
// init pg db
@ -76,7 +78,7 @@ func coldImport() {
nodeBuilder := cold_import.NewColdImportNodeBuilder(reader, parser)
coldNode, err := nodeBuilder.GetNode(genesisBlock, levelDbPath)
if err != nil {
log.Fatal("Error getting node: ", err)
LogWithCommand.Fatal("Error getting node: ", err)
}
pgDB := utils.LoadPostgres(databaseConfig, coldNode)
@ -90,6 +92,6 @@ func coldImport() {
coldImporter := cold_import.NewColdImporter(ethDB, blockRepository, receiptRepository, blockConverter)
err = coldImporter.Execute(startingBlockNumber, endingBlockNumber, coldNode.ID)
if err != nil {
log.Fatal("Error executing cold import: ", err)
LogWithCommand.Fatal("Error executing cold import: ", err)
}
}


@ -101,6 +101,8 @@ single config file or in separate command instances using different config files
Specify config location when executing the command:
./vulcanizedb compose --config=./environments/config_name.toml`,
Run: func(cmd *cobra.Command, args []string) {
SubCommand = cmd.CalledAs()
LogWithCommand = *log.WithField("SubCommand", SubCommand)
compose()
},
}
@ -110,23 +112,25 @@ func compose() {
prepConfig()
// Generate code to build the plugin according to the config file
log.Info("generating plugin")
LogWithCommand.Info("generating plugin")
generator, err := p2.NewGenerator(genConfig, databaseConfig)
if err != nil {
log.Fatal(err)
LogWithCommand.Debug("initializing plugin generator failed")
LogWithCommand.Fatal(err)
}
err = generator.GenerateExporterPlugin()
if err != nil {
log.Debug("generating plugin failed")
log.Fatal(err)
LogWithCommand.Debug("generating plugin failed")
LogWithCommand.Fatal(err)
}
// TODO: Embed versioning info in the .so files so we know which version of vulcanizedb to run them with
_, pluginPath, err := genConfig.GetPluginPaths()
if err != nil {
log.Fatal(err)
LogWithCommand.Debug("getting plugin path failed")
LogWithCommand.Fatal(err)
}
fmt.Printf("Composed plugin %s", pluginPath)
log.Info("plugin .so file output to", pluginPath)
LogWithCommand.Info("plugin .so file output to ", pluginPath)
}
func init() {
@ -134,38 +138,38 @@ func init() {
}
func prepConfig() {
log.Info("configuring plugin")
LogWithCommand.Info("configuring plugin")
names := viper.GetStringSlice("exporter.transformerNames")
transformers := make(map[string]config.Transformer)
for _, name := range names {
transformer := viper.GetStringMapString("exporter." + name)
p, pOK := transformer["path"]
if !pOK || p == "" {
log.Fatal(name, "transformer config is missing `path` value")
LogWithCommand.Fatal(name, " transformer config is missing `path` value")
}
r, rOK := transformer["repository"]
if !rOK || r == "" {
log.Fatal(name, "transformer config is missing `repository` value")
LogWithCommand.Fatal(name, " transformer config is missing `repository` value")
}
m, mOK := transformer["migrations"]
if !mOK || m == "" {
log.Fatal(name, "transformer config is missing `migrations` value")
LogWithCommand.Fatal(name, " transformer config is missing `migrations` value")
}
mr, mrOK := transformer["rank"]
if !mrOK || mr == "" {
log.Fatal(name, "transformer config is missing `rank` value")
LogWithCommand.Fatal(name, " transformer config is missing `rank` value")
}
rank, err := strconv.ParseUint(mr, 10, 64)
if err != nil {
log.Fatal(name, "migration `rank` can't be converted to an unsigned integer")
LogWithCommand.Fatal(name, " migration `rank` can't be converted to an unsigned integer")
}
t, tOK := transformer["type"]
if !tOK {
log.Fatal(name, "transformer config is missing `type` value")
LogWithCommand.Fatal(name, " transformer config is missing `type` value")
}
transformerType := config.GetTransformerType(t)
if transformerType == config.UnknownTransformerType {
log.Fatal(errors.New(`unknown transformer type in exporter config accepted types are "eth_event", "eth_storage"`))
LogWithCommand.Fatal(errors.New(`unknown transformer type in exporter config accepted types are "eth_event", "eth_storage"`))
}
transformers[name] = config.Transformer{


@ -105,6 +105,8 @@ single config file or in separate command instances using different config files
Specify config location when executing the command:
./vulcanizedb composeAndExecute --config=./environments/config_name.toml`,
Run: func(cmd *cobra.Command, args []string) {
SubCommand = cmd.CalledAs()
LogWithCommand = *log.WithField("SubCommand", SubCommand)
composeAndExecute()
},
}
@ -114,44 +116,44 @@ func composeAndExecute() {
prepConfig()
// Generate code to build the plugin according to the config file
log.Info("generating plugin")
LogWithCommand.Info("generating plugin")
generator, err := p2.NewGenerator(genConfig, databaseConfig)
if err != nil {
log.Fatal(err)
LogWithCommand.Fatal(err)
}
err = generator.GenerateExporterPlugin()
if err != nil {
log.Debug("generating plugin failed")
log.Fatal(err)
LogWithCommand.Debug("generating plugin failed")
LogWithCommand.Fatal(err)
}
// Get the plugin path and load the plugin
_, pluginPath, err := genConfig.GetPluginPaths()
if err != nil {
log.Fatal(err)
LogWithCommand.Fatal(err)
}
if !genConfig.Save {
defer helpers.ClearFiles(pluginPath)
}
log.Info("linking plugin", pluginPath)
LogWithCommand.Info("linking plugin ", pluginPath)
plug, err := plugin.Open(pluginPath)
if err != nil {
log.Debug("linking plugin failed")
log.Fatal(err)
LogWithCommand.Debug("linking plugin failed")
LogWithCommand.Fatal(err)
}
// Load the `Exporter` symbol from the plugin
log.Info("loading transformers from plugin")
LogWithCommand.Info("loading transformers from plugin")
symExporter, err := plug.Lookup("Exporter")
if err != nil {
log.Debug("loading Exporter symbol failed")
log.Fatal(err)
LogWithCommand.Debug("loading Exporter symbol failed")
LogWithCommand.Fatal(err)
}
// Assert that the symbol is of type Exporter
exporter, ok := symExporter.(Exporter)
if !ok {
log.Debug("plugged-in symbol not of type Exporter")
LogWithCommand.Debug("plugged-in symbol not of type Exporter")
os.Exit(1)
}
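
composeAndExecute and execute share this load sequence for the generated .so file: plugin.Open links it into the process, Lookup fetches the Exporter symbol, and a type assertion checks that it implements the Exporter interface. A stripped-down sketch of that flow; the one-method Exporter interface and the plugin path are placeholders, since the real interface exports the EventTransformerInitializer, StorageTransformerInitializer, and ContractTransformerInitializer sets.

```go
package main

import (
	"plugin"

	log "github.com/sirupsen/logrus"
)

// Exporter is a placeholder for the real interface, which exposes the
// transformer initializer sets rather than a bare Export method.
type Exporter interface {
	Export()
}

func loadExporter(pluginPath string) Exporter {
	logger := *log.WithField("SubCommand", "composeAndExecute")

	// Link the compiled plugin (.so) into the running process.
	logger.Info("linking plugin ", pluginPath)
	plug, err := plugin.Open(pluginPath)
	if err != nil {
		logger.Debug("linking plugin failed")
		logger.Fatal(err)
	}

	// Look up the exported Exporter symbol by name.
	logger.Info("loading transformers from plugin")
	symExporter, err := plug.Lookup("Exporter")
	if err != nil {
		logger.Debug("loading Exporter symbol failed")
		logger.Fatal(err)
	}

	// Assert that the symbol satisfies the Exporter interface.
	exporter, ok := symExporter.(Exporter)
	if !ok {
		logger.Fatal("plugged-in symbol not of type Exporter")
	}
	return exporter
}

func main() {
	// "plugin_path.so" is illustrative; the real path comes from genConfig.GetPluginPaths().
	exporter := loadExporter("plugin_path.so")
	exporter.Export()
}
```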


@ -79,6 +79,8 @@ Requires a .toml config file:
piping = true
`,
Run: func(cmd *cobra.Command, args []string) {
SubCommand = cmd.CalledAs()
LogWithCommand = *log.WithField("SubCommand", SubCommand)
contractWatcher()
},
}
@ -103,18 +105,18 @@ func contractWatcher() {
case "full":
t = ft.NewTransformer(con, blockChain, &db)
default:
log.Fatal("Invalid mode")
LogWithCommand.Fatal("Invalid mode")
}
err := t.Init()
if err != nil {
log.Fatal(fmt.Sprintf("Failed to initialized transformer\r\nerr: %v\r\n", err))
LogWithCommand.Fatal(fmt.Sprintf("Failed to initialize transformer, err: %v ", err))
}
for range ticker.C {
err = t.Execute()
if err != nil {
log.Error("Execution error for transformer:", t.GetConfig().Name, err)
LogWithCommand.Error("Execution error for transformer: ", t.GetConfig().Name, err)
}
}
}


@ -63,6 +63,8 @@ must have been composed by the same version of vulcanizedb or else it will not b
Specify config location when executing the command:
./vulcanizedb execute --config=./environments/config_name.toml`,
Run: func(cmd *cobra.Command, args []string) {
SubCommand = cmd.CalledAs()
LogWithCommand = *log.WithField("SubCommand", SubCommand)
execute()
},
}
@ -74,29 +76,29 @@ func execute() {
// Get the plugin path and load the plugin
_, pluginPath, err := genConfig.GetPluginPaths()
if err != nil {
log.Fatal(err)
LogWithCommand.Fatal(err)
}
fmt.Printf("Executing plugin %s", pluginPath)
log.Info("linking plugin", pluginPath)
LogWithCommand.Info("linking plugin ", pluginPath)
plug, err := plugin.Open(pluginPath)
if err != nil {
log.Warn("linking plugin failed")
log.Fatal(err)
LogWithCommand.Warn("linking plugin failed")
LogWithCommand.Fatal(err)
}
// Load the `Exporter` symbol from the plugin
log.Info("loading transformers from plugin")
LogWithCommand.Info("loading transformers from plugin")
symExporter, err := plug.Lookup("Exporter")
if err != nil {
log.Warn("loading Exporter symbol failed")
log.Fatal(err)
LogWithCommand.Warn("loading Exporter symbol failed")
LogWithCommand.Fatal(err)
}
// Assert that the symbol is of type Exporter
exporter, ok := symExporter.(Exporter)
if !ok {
log.Fatal("plugged-in symbol not of type Exporter")
LogWithCommand.Fatal("plugged-in symbol not of type Exporter")
}
// Use the Exporters export method to load the EventTransformerInitializer, StorageTransformerInitializer, and ContractTransformerInitializer sets
@ -147,7 +149,7 @@ type Exporter interface {
func watchEthEvents(w *watcher.EventWatcher, wg *syn.WaitGroup) {
defer wg.Done()
// Execute over the EventTransformerInitializer set using the watcher
log.Info("executing event transformers")
LogWithCommand.Info("executing event transformers")
var recheck constants.TransformerExecution
if recheckHeadersArg {
recheck = constants.HeaderRecheck
@ -164,7 +166,7 @@ func watchEthEvents(w *watcher.EventWatcher, wg *syn.WaitGroup) {
func watchEthStorage(w *watcher.StorageWatcher, wg *syn.WaitGroup) {
defer wg.Done()
// Execute over the StorageTransformerInitializer set using the storage watcher
log.Info("executing storage transformers")
LogWithCommand.Info("executing storage transformers")
ticker := time.NewTicker(pollingInterval)
defer ticker.Stop()
for range ticker.C {
@ -177,7 +179,7 @@ func watchEthStorage(w *watcher.StorageWatcher, wg *syn.WaitGroup) {
func watchEthContract(w *watcher.ContractWatcher, wg *syn.WaitGroup) {
defer wg.Done()
// Execute over the ContractTransformerInitializer set using the contract watcher
log.Info("executing contract_watcher transformers")
LogWithCommand.Info("executing contract_watcher transformers")
ticker := time.NewTicker(pollingInterval)
defer ticker.Stop()
for range ticker.C {


@ -49,20 +49,21 @@ Expects ethereum node to be running and requires a .toml config:
ipcPath = "/Users/user/Library/Ethereum/geth.ipc"
`,
Run: func(cmd *cobra.Command, args []string) {
SubCommand = cmd.CalledAs()
LogWithCommand = *log.WithField("SubCommand", SubCommand)
fullSync()
},
}
func init() {
rootCmd.AddCommand(fullSyncCmd)
fullSyncCmd.Flags().Int64VarP(&startingBlockNumber, "starting-block-number", "s", 0, "Block number to start syncing from")
}
func backFillAllBlocks(blockchain core.BlockChain, blockRepository datastore.BlockRepository, missingBlocksPopulated chan int, startingBlockNumber int64) {
populated, err := history.PopulateMissingBlocks(blockchain, blockRepository, startingBlockNumber)
if err != nil {
log.Error("backfillAllBlocks: error in populateMissingBlocks: ", err)
LogWithCommand.Error("backfillAllBlocks: error in populateMissingBlocks: ", err)
}
missingBlocksPopulated <- populated
}
@ -74,13 +75,13 @@ func fullSync() {
blockChain := getBlockChain()
lastBlock, err := blockChain.LastBlock()
if err != nil {
log.Error("fullSync: Error getting last block: ", err)
LogWithCommand.Error("fullSync: Error getting last block: ", err)
}
if lastBlock.Int64() == 0 {
log.Fatal("geth initial: state sync not finished")
LogWithCommand.Fatal("geth initial: state sync not finished")
}
if startingBlockNumber > lastBlock.Int64() {
log.Fatal("fullSync: starting block number > current block number")
LogWithCommand.Fatal("fullSync: starting block number > current block number")
}
db := utils.LoadPostgres(databaseConfig, blockChain.Node())
@ -94,9 +95,9 @@ func fullSync() {
case <-ticker.C:
window, err := validator.ValidateBlocks()
if err != nil {
log.Error("fullSync: error in validateBlocks: ", err)
LogWithCommand.Error("fullSync: error in validateBlocks: ", err)
}
log.Debug(window.GetString())
LogWithCommand.Debug(window.GetString())
case <-missingBlocksPopulated:
go backFillAllBlocks(blockChain, blockRepository, missingBlocksPopulated, startingBlockNumber)
}


@ -50,6 +50,8 @@ Expects ethereum node to be running and requires a .toml config:
ipcPath = "/Users/user/Library/Ethereum/geth.ipc"
`,
Run: func(cmd *cobra.Command, args []string) {
SubCommand = cmd.CalledAs()
LogWithCommand = *log.WithField("SubCommand", SubCommand)
headerSync()
},
}
@ -64,7 +66,7 @@ func backFillAllHeaders(blockchain core.BlockChain, headerRepository datastore.H
if err != nil {
// TODO Lots of possible errors in the call stack above. If errors occur, we still put
// 0 in the channel, triggering another round
log.Error("backfillAllHeaders: Error populating headers: ", err)
LogWithCommand.Error("backfillAllHeaders: Error populating headers: ", err)
}
missingBlocksPopulated <- populated
}
@ -86,9 +88,9 @@ func headerSync() {
case <-ticker.C:
window, err := validator.ValidateHeaders()
if err != nil {
log.Error("headerSync: ValidateHeaders failed: ", err)
LogWithCommand.Error("headerSync: ValidateHeaders failed: ", err)
}
log.Debug(window.GetString())
LogWithCommand.Debug(window.GetString())
case n := <-missingBlocksPopulated:
if n == 0 {
time.Sleep(3 * time.Second)
@ -101,12 +103,12 @@ func headerSync() {
func validateArgs(blockChain *geth.BlockChain) {
lastBlock, err := blockChain.LastBlock()
if err != nil {
log.Error("validateArgs: Error getting last block: ", err)
LogWithCommand.Error("validateArgs: Error getting last block: ", err)
}
if lastBlock.Int64() == 0 {
log.Fatal("geth initial: state sync not finished")
LogWithCommand.Fatal("geth initial: state sync not finished")
}
if startingBlockNumber > lastBlock.Int64() {
log.Fatal("starting block number > current block number")
LogWithCommand.Fatal("starting block number > current block number")
}
}


@ -46,6 +46,8 @@ var (
syncAll bool
endingBlockNumber int64
recheckHeadersArg bool
SubCommand string
LogWithCommand log.Entry
)
const (
@ -66,16 +68,15 @@ func Execute() {
}
func initFuncs(cmd *cobra.Command, args []string) {
database(cmd, args)
logLvlErr := logLevel(cmd, args)
setViperConfigs()
logLvlErr := logLevel()
if logLvlErr != nil {
log.Fatal("Could not set log level: ", logLvlErr)
}
}
func database(cmd *cobra.Command, args []string) {
func setViperConfigs() {
ipc = viper.GetString("client.ipcpath")
levelDbPath = viper.GetString("client.leveldbpath")
storageDiffsPath = viper.GetString("filesystem.storageDiffsPath")
@ -89,12 +90,15 @@ func database(cmd *cobra.Command, args []string) {
viper.Set("database.config", databaseConfig)
}
func logLevel(cmd *cobra.Command, args []string) error {
func logLevel() error {
lvl, err := log.ParseLevel(viper.GetString("log.level"))
if err != nil {
return err
}
log.SetLevel(lvl)
if lvl > log.InfoLevel {
log.SetReportCaller(true)
}
log.Info("Log level set to ", lvl.String())
return nil
}
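
logrus orders its levels Panic < Fatal < Error < Warn < Info < Debug < Trace, so the lvl > log.InfoLevel check above enables caller reporting only when running at Debug or Trace. A standalone sketch of the same helper, with a hard-coded "debug" standing in for the viper-supplied log.level value:

```go
package main

import (
	log "github.com/sirupsen/logrus"
)

// setLogLevel mirrors the reworked logLevel() helper: parse the configured
// level, apply it, and attach caller file/line information when running more
// verbosely than Info.
func setLogLevel(configured string) error {
	lvl, err := log.ParseLevel(configured)
	if err != nil {
		return err
	}
	log.SetLevel(lvl)
	if lvl > log.InfoLevel {
		log.SetReportCaller(true)
	}
	log.Info("Log level set to ", lvl.String())
	return nil
}

func main() {
	// "debug" stands in for the value normally read from the log.level config key.
	if err := setLogLevel("debug"); err != nil {
		log.Fatal("Could not set log level: ", err)
	}
	log.Debug("caller file and line are now attached to each entry")
}
```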
@ -151,7 +155,7 @@ func getBlockChain() *geth.BlockChain {
rawRpcClient, err := rpc.Dial(ipc)
if err != nil {
log.Fatal("Could not dial client: ", err)
LogWithCommand.Fatal(err)
}
rpcClient := client.NewRpcClient(rawRpcClient, ipc)
ethClient := ethclient.NewClient(rawRpcClient)


@ -3,15 +3,15 @@ package main
import (
"github.com/vulcanize/vulcanizedb/cmd"
log "github.com/sirupsen/logrus"
"os"
log "github.com/sirupsen/logrus"
)
func main() {
log.SetFormatter(&log.JSONFormatter{
PrettyPrint: true,
})
log.SetReportCaller(true)
file, err := os.OpenFile("vulcanizedb.log",
os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666)
if err == nil {

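The main.go hunk is truncated after the os.OpenFile error check. A minimal sketch of the surrounding setup, under the assumption (not shown in the diff) that the opened file is handed to log.SetOutput and that stderr remains the fallback when the file cannot be opened:

```go
package main

import (
	"os"

	log "github.com/sirupsen/logrus"
)

func main() {
	// Pretty-printed JSON entries, with caller file/line attached.
	log.SetFormatter(&log.JSONFormatter{
		PrettyPrint: true,
	})
	log.SetReportCaller(true)

	file, err := os.OpenFile("vulcanizedb.log",
		os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666)
	if err == nil {
		// Assumption: the truncated branch redirects output to the log file.
		log.SetOutput(file)
	} else {
		// Assumption: keep the default writer (stderr) if the file cannot be opened.
		log.Info("Failed to log to file, using default stderr")
	}

	log.Info("logging configured")
}
```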

@ -42,7 +42,7 @@ func PopulateMissingHeaders(blockChain core.BlockChain, headerRepository datasto
log.Debug(getBlockRangeString(blockNumbers))
_, err = RetrieveAndUpdateHeaders(blockChain, headerRepository, blockNumbers)
if err != nil {
log.Error("PopulateMissingHeaders: Error getting/updating headers:", err)
log.Error("PopulateMissingHeaders: Error getting/updating headers: ", err)
return 0, err
}
return len(blockNumbers), nil