Merge pull request #116 from vulcanize/storage-diffs-over-rpc

Storage diffs over RPC
Commit b4e16c4af5, authored by Elizabeth on 2019-10-02 09:56:29 -05:00 and committed by GitHub.
37 changed files with 1140 additions and 430 deletions


@@ -17,6 +17,10 @@
 package cmd

 import (
+	"github.com/ethereum/go-ethereum/statediff"
+	"github.com/vulcanize/vulcanizedb/libraries/shared/fetcher"
+	"github.com/vulcanize/vulcanizedb/libraries/shared/streamer"
+	"github.com/vulcanize/vulcanizedb/pkg/fs"
 	"os"
 	"plugin"
 	syn "sync"
@@ -25,9 +29,7 @@ import (
 	log "github.com/sirupsen/logrus"
 	"github.com/spf13/cobra"
-	"github.com/vulcanize/vulcanizedb/libraries/shared/fetcher"
 	"github.com/vulcanize/vulcanizedb/libraries/shared/watcher"
-	"github.com/vulcanize/vulcanizedb/pkg/fs"
 	p2 "github.com/vulcanize/vulcanizedb/pkg/plugin"
 	"github.com/vulcanize/vulcanizedb/pkg/plugin/helpers"
 	"github.com/vulcanize/vulcanizedb/utils"
@@ -179,6 +181,19 @@ func composeAndExecute() {
 	}
 	if len(ethStorageInitializers) > 0 {
+		switch storageDiffsSource {
+		case "geth":
+			log.Debug("fetching storage diffs from geth pub sub")
+			rpcClient, _ := getClients()
+			stateDiffStreamer := streamer.NewStateDiffStreamer(rpcClient)
+			payloadChan := make(chan statediff.Payload)
+			storageFetcher := fetcher.NewGethRpcStorageFetcher(&stateDiffStreamer, payloadChan)
+			sw := watcher.NewStorageWatcher(storageFetcher, &db)
+			sw.AddTransformers(ethStorageInitializers)
+			wg.Add(1)
+			go watchEthStorage(&sw, &wg)
+		default:
+			log.Debug("fetching storage diffs from csv")
 			tailer := fs.FileTailer{Path: storageDiffsPath}
 			storageFetcher := fetcher.NewCsvTailStorageFetcher(tailer)
 			sw := watcher.NewStorageWatcher(storageFetcher, &db)
@@ -186,6 +201,7 @@ func composeAndExecute() {
 			wg.Add(1)
 			go watchEthStorage(&sw, &wg)
 		}
+	}
 	if len(ethContractInitializers) > 0 {
 		gw := watcher.NewContractWatcher(&db, blockChain)


@@ -18,6 +18,10 @@ package cmd
 import (
 	"fmt"
+	"github.com/ethereum/go-ethereum/statediff"
+	"github.com/vulcanize/vulcanizedb/libraries/shared/fetcher"
+	"github.com/vulcanize/vulcanizedb/libraries/shared/streamer"
+	"github.com/vulcanize/vulcanizedb/pkg/fs"
 	"plugin"
 	syn "sync"
 	"time"
@@ -26,11 +30,9 @@ import (
 	"github.com/spf13/cobra"
 	"github.com/vulcanize/vulcanizedb/libraries/shared/constants"
-	"github.com/vulcanize/vulcanizedb/libraries/shared/fetcher"
 	storageUtils "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
 	"github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
 	"github.com/vulcanize/vulcanizedb/libraries/shared/watcher"
-	"github.com/vulcanize/vulcanizedb/pkg/fs"
 	"github.com/vulcanize/vulcanizedb/utils"
 )
@@ -123,6 +125,19 @@ func execute() {
 	}
 	if len(ethStorageInitializers) > 0 {
+		switch storageDiffsSource {
+		case "geth":
+			log.Debug("fetching storage diffs from geth pub sub")
+			rpcClient, _ := getClients()
+			stateDiffStreamer := streamer.NewStateDiffStreamer(rpcClient)
+			payloadChan := make(chan statediff.Payload)
+			storageFetcher := fetcher.NewGethRpcStorageFetcher(&stateDiffStreamer, payloadChan)
+			sw := watcher.NewStorageWatcher(storageFetcher, &db)
+			sw.AddTransformers(ethStorageInitializers)
+			wg.Add(1)
+			go watchEthStorage(&sw, &wg)
+		default:
+			log.Debug("fetching storage diffs from csv")
 			tailer := fs.FileTailer{Path: storageDiffsPath}
 			storageFetcher := fetcher.NewCsvTailStorageFetcher(tailer)
 			sw := watcher.NewStorageWatcher(storageFetcher, &db)
@@ -130,6 +145,7 @@ func execute() {
 			wg.Add(1)
 			go watchEthStorage(&sw, &wg)
 		}
+	}
 	if len(ethContractInitializers) > 0 {
 		gw := watcher.NewContractWatcher(&db, blockChain)
@@ -166,7 +182,7 @@ func watchEthEvents(w *watcher.EventWatcher, wg *syn.WaitGroup) {
 	}
 }

-func watchEthStorage(w *watcher.StorageWatcher, wg *syn.WaitGroup) {
+func watchEthStorage(w watcher.IStorageWatcher, wg *syn.WaitGroup) {
 	defer wg.Done()
 	// Execute over the StorageTransformerInitializer set using the storage watcher
 	LogWithCommand.Info("executing storage transformers")
@@ -174,8 +190,8 @@ func watchEthStorage(w *watcher.StorageWatcher, wg *syn.WaitGroup) {
 	defer ticker.Stop()
 	for range ticker.C {
 		errs := make(chan error)
-		rows := make(chan storageUtils.StorageDiffRow)
-		w.Execute(rows, errs, queueRecheckInterval)
+		diffs := make(chan storageUtils.StorageDiff)
+		w.Execute(diffs, errs, queueRecheckInterval)
 	}
 }
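Both commands now follow the same pattern: build an IStorageFetcher for the configured source, wrap it in a StorageWatcher, register the plugin's storage transformer initializers, and run the watcher in its own goroutine. A minimal sketch of the source selection under those assumptions; the helper name chooseStorageFetcher is illustrative, not part of this PR:

```go
package example

import (
	"github.com/ethereum/go-ethereum/statediff"

	"github.com/vulcanize/vulcanizedb/libraries/shared/fetcher"
	"github.com/vulcanize/vulcanizedb/libraries/shared/streamer"
	"github.com/vulcanize/vulcanizedb/pkg/fs"
)

// chooseStorageFetcher mirrors the switch added above: the CSV tailer stays
// the default, and "geth" selects the new pub-sub fetcher.
func chooseStorageFetcher(source, csvPath string, stateDiffStreamer streamer.Streamer) fetcher.IStorageFetcher {
	if source == "geth" {
		payloadChan := make(chan statediff.Payload)
		return fetcher.NewGethRpcStorageFetcher(stateDiffStreamer, payloadChan)
	}
	return fetcher.NewCsvTailStorageFetcher(fs.FileTailer{Path: csvPath})
}
```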


@@ -48,6 +48,7 @@ var (
 	recheckHeadersArg bool
 	SubCommand string
 	LogWithCommand log.Entry
+	storageDiffsSource string
 )

 const (
@@ -80,6 +81,7 @@ func setViperConfigs() {
 	ipc = viper.GetString("client.ipcpath")
 	levelDbPath = viper.GetString("client.leveldbpath")
 	storageDiffsPath = viper.GetString("filesystem.storageDiffsPath")
+	storageDiffsSource = viper.GetString("storageDiffs.source")
 	databaseConfig = config.Database{
 		Name: viper.GetString("database.name"),
 		Hostname: viper.GetString("database.hostname"),
@@ -118,6 +120,7 @@ func init() {
 	rootCmd.PersistentFlags().String("client-ipcPath", "", "location of geth.ipc file")
 	rootCmd.PersistentFlags().String("client-levelDbPath", "", "location of levelDb chaindata")
 	rootCmd.PersistentFlags().String("filesystem-storageDiffsPath", "", "location of storage diffs csv file")
+	rootCmd.PersistentFlags().String("storageDiffs-source", "csv", "where to get the state diffs: csv or geth")
 	rootCmd.PersistentFlags().String("exporter-name", "exporter", "name of exporter plugin")
 	rootCmd.PersistentFlags().String("log-level", log.InfoLevel.String(), "Log level (trace, debug, info, warn, error, fatal, panic")
@@ -129,6 +132,7 @@ func init() {
 	viper.BindPFlag("client.ipcPath", rootCmd.PersistentFlags().Lookup("client-ipcPath"))
 	viper.BindPFlag("client.levelDbPath", rootCmd.PersistentFlags().Lookup("client-levelDbPath"))
 	viper.BindPFlag("filesystem.storageDiffsPath", rootCmd.PersistentFlags().Lookup("filesystem-storageDiffsPath"))
+	viper.BindPFlag("storageDiffs.source", rootCmd.PersistentFlags().Lookup("storageDiffs-source"))
 	viper.BindPFlag("exporter.fileName", rootCmd.PersistentFlags().Lookup("exporter-name"))
 	viper.BindPFlag("log.level", rootCmd.PersistentFlags().Lookup("log-level"))
 }
@@ -152,6 +156,14 @@ func initConfig() {
 }

 func getBlockChain() *geth.BlockChain {
+	rpcClient, ethClient := getClients()
+	vdbEthClient := client.NewEthClient(ethClient)
+	vdbNode := node.MakeNode(rpcClient)
+	transactionConverter := vRpc.NewRpcTransactionConverter(ethClient)
+	return geth.NewBlockChain(vdbEthClient, rpcClient, vdbNode, transactionConverter)
+}
+
+func getClients() (client.RpcClient, *ethclient.Client) {
 	rawRpcClient, err := rpc.Dial(ipc)
 	if err != nil {
@@ -159,8 +171,6 @@ func getBlockChain() *geth.BlockChain {
 	}
 	rpcClient := client.NewRpcClient(rawRpcClient, ipc)
 	ethClient := ethclient.NewClient(rawRpcClient)
-	vdbEthClient := client.NewEthClient(ethClient)
-	vdbNode := node.MakeNode(rpcClient)
-	transactionConverter := vRpc.NewRpcTransactionConverter(ethClient)
-	return geth.NewBlockChain(vdbEthClient, rpcClient, vdbNode, transactionConverter)
+	return rpcClient, ethClient
 }

go.mod (12 changes)

@@ -12,9 +12,11 @@ require (
 	github.com/ethereum/go-ethereum v1.9.5
 	github.com/fjl/memsize v0.0.0-20190710130421-bcb5799ab5e5 // indirect
 	github.com/gballet/go-libpcsclite v0.0.0-20190607065134-2772fd86a8ff // indirect
+	github.com/go-sql-driver/mysql v1.4.1 // indirect
+	github.com/golang/protobuf v1.3.2 // indirect
 	github.com/gorilla/websocket v1.4.1 // indirect
-	github.com/graph-gophers/graphql-go v0.0.0-20190724201507-010347b5f9e6 // indirect
-	github.com/hashicorp/golang-lru v0.5.1
+	github.com/hashicorp/golang-lru v0.5.3
+	github.com/howeyc/fsnotify v0.9.0 // indirect
 	github.com/hpcloud/tail v1.0.0
 	github.com/huin/goupnp v1.0.0 // indirect
 	github.com/jackpal/go-nat-pmp v1.0.1 // indirect
@@ -42,8 +44,14 @@ require (
 	github.com/syndtr/goleveldb v1.0.0 // indirect
 	github.com/tyler-smith/go-bip39 v1.0.2 // indirect
 	github.com/wsddn/go-ecdh v0.0.0-20161211032359-48726bab9208 // indirect
+	golang.org/x/crypto v0.0.0-20190926114937-fa1a29108794 // indirect
 	golang.org/x/net v0.0.0-20190603091049-60506f45cf65
 	golang.org/x/sync v0.0.0-20190423024810-112230192c58
 	gopkg.in/olebedev/go-duktape.v3 v3.0.0-20190709231704-1e4459ed25ff // indirect
 	gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7
+	gopkg.in/urfave/cli.v1 v1.0.0-00010101000000-000000000000 // indirect
 )
+
+replace github.com/ethereum/go-ethereum => github.com/vulcanize/go-ethereum v0.0.0-20190731183759-8e20673bd101
+
+replace gopkg.in/urfave/cli.v1 => gopkg.in/urfave/cli.v1 v1.20.0

go.sum (11 changes)

@@ -91,6 +91,8 @@ github.com/graph-gophers/graphql-go v0.0.0-20190724201507-010347b5f9e6/go.mod h1
 github.com/hashicorp/golang-lru v0.0.0-20180201235237-0fb14efe8c47/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
 github.com/hashicorp/golang-lru v0.5.1 h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+dAcgU=
 github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
+github.com/hashicorp/golang-lru v0.5.3 h1:YPkqC67at8FYaadspW/6uE0COsBxS2656RLEr8Bppgk=
+github.com/hashicorp/golang-lru v0.5.3/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
 github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
 github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
 github.com/howeyc/fsnotify v0.9.0 h1:0gtV5JmOKH4A8SsFxG2BczSeXWWPvcMT0euZt5gDAxY=
@@ -241,6 +243,10 @@ github.com/tyler-smith/go-bip39 v1.0.0/go.mod h1:sJ5fKU0s6JVwZjjcUEX2zFOnvq0ASQ2
 github.com/tyler-smith/go-bip39 v1.0.2 h1:+t3w+KwLXO6154GNJY+qUtIxLTmFjfUmpguQT1OlOT8=
 github.com/tyler-smith/go-bip39 v1.0.2/go.mod h1:sJ5fKU0s6JVwZjjcUEX2zFOnvq0ASQ2K9Zr6cf67kNs=
 github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
+github.com/vulcanize/go-ethereum v0.0.0-20190731183759-8e20673bd101 h1:fsHhBzscAwi4u7/F033SFJwTIz+46D8uDWMu2/ZdvzA=
+github.com/vulcanize/go-ethereum v0.0.0-20190731183759-8e20673bd101 h1:fsHhBzscAwi4u7/F033SFJwTIz+46D8uDWMu2/ZdvzA=
+github.com/vulcanize/go-ethereum v0.0.0-20190731183759-8e20673bd101/go.mod h1:9i0pGnKDUFFr8yC/n8xyrNBVfhYlpwE8J3Ge6ThKvug=
+github.com/vulcanize/go-ethereum v0.0.0-20190731183759-8e20673bd101/go.mod h1:9i0pGnKDUFFr8yC/n8xyrNBVfhYlpwE8J3Ge6ThKvug=
 github.com/vulcanize/vulcanizedb v0.0.5/go.mod h1:utXkheCL9VjTfmuivuvRiAAyHh54GSN9XRQNEbFCA8k=
 github.com/wsddn/go-ecdh v0.0.0-20161211032359-48726bab9208 h1:1cngl9mPEoITZG8s8cVcUy5CeIBYhEESkOB7m6Gmkrk=
 github.com/wsddn/go-ecdh v0.0.0-20161211032359-48726bab9208/go.mod h1:IotVbo4F+mw0EzQ08zFqg7pK3FebNXpaMsRy2RT+Ees=
@@ -252,6 +258,8 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 h1:VklqNMn3ovrHsnt90Pveol
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5 h1:58fnuSXlxZmFdJyvtTFVmVhcMLU6v5fEb/ok4wyqtNU=
 golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20190926114937-fa1a29108794 h1:4Yo9XtTfxfBCecLiBW8TYsFIdN7TkDhjGLWetFo4JSo=
+golang.org/x/crypto v0.0.0-20190926114937-fa1a29108794/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY=
 golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20181011144130-49bb7cea24b1/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20181114220301-adae6a3d119a h1:gOpx8G595UYyvj8UK4+OFyY4rx037g3fmfhe5SasG3U=
@@ -281,6 +289,8 @@ golang.org/x/sys v0.0.0-20190606165138-5da285871e9c h1:+EXw7AwNOKzPFXMZ1yNjO40aW
 golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a h1:aYOabOQFp6Vj6W1F80affTUvO9UxmJRx8K0gsfABByQ=
 golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190922100055-0a153f010e69 h1:rOhMmluY6kLMhdnrivzec6lLgaVbMHMn2ISQXJeJ5EM=
+golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
@@ -300,6 +310,7 @@ gopkg.in/olebedev/go-duktape.v3 v3.0.0-20190709231704-1e4459ed25ff h1:uuol9OUzSv
 gopkg.in/olebedev/go-duktape.v3 v3.0.0-20190709231704-1e4459ed25ff/go.mod h1:uAJfkITjFhyEEuUfm7bsmCZRbW5WRq8s9EY8HZ6hCns=
 gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
 gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
+gopkg.in/urfave/cli.v1 v1.20.0 h1:NdAVW6RYxDif9DhDHaAortIu956m2c0v+09AZBPTbE0=
 gopkg.in/urfave/cli.v1 v1.20.0/go.mod h1:vuBzUtMdQeixQj8LVd+/98pzhxNGQoyuPBlsXHOQNO0=
 gopkg.in/yaml.v2 v2.2.1 h1:mUhvW9EsL+naU5Q3cakzfE91YhliOondGd6ZrsDBHQE=
 gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=


@ -18,7 +18,6 @@ package storage
import ( import (
"github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage" "github.com/vulcanize/vulcanizedb/libraries/shared/storage"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
"github.com/vulcanize/vulcanizedb/libraries/shared/transformer" "github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
@ -26,7 +25,7 @@ import (
) )
type Transformer struct { type Transformer struct {
Address common.Address HashedAddress common.Hash
Mappings storage.Mappings Mappings storage.Mappings
Repository Repository Repository Repository
} }
@ -37,18 +36,18 @@ func (transformer Transformer) NewTransformer(db *postgres.DB) transformer.Stora
return transformer return transformer
} }
func (transformer Transformer) ContractAddress() common.Address { func (transformer Transformer) KeccakContractAddress() common.Hash {
return transformer.Address return transformer.HashedAddress
} }
func (transformer Transformer) Execute(row utils.StorageDiffRow) error { func (transformer Transformer) Execute(diff utils.StorageDiff) error {
metadata, lookupErr := transformer.Mappings.Lookup(row.StorageKey) metadata, lookupErr := transformer.Mappings.Lookup(diff.StorageKey)
if lookupErr != nil { if lookupErr != nil {
return lookupErr return lookupErr
} }
value, decodeErr := utils.Decode(row, metadata) value, decodeErr := utils.Decode(diff, metadata)
if decodeErr != nil { if decodeErr != nil {
return decodeErr return decodeErr
} }
return transformer.Repository.Create(row.BlockHeight, row.BlockHash.Hex(), metadata, value) return transformer.Repository.Create(diff.BlockHeight, diff.BlockHash.Hex(), metadata, value)
} }
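For transformer authors, the practical change in this factory is that a transformer is now keyed by the keccak-256 hash of the watched contract address instead of the address itself. A hedged sketch using the utils.HexToKeccak256Hash helper exercised in the tests below; the function name and its arguments are placeholders, not part of this PR:

```go
package example

import (
	"github.com/vulcanize/vulcanizedb/libraries/shared/factories/storage"
	"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
)

// newExampleTransformer shows the field that changed: transformers now carry
// the keccak-256 hash of the contract address they watch. The address,
// mappings and repository arguments stand in for project-specific values.
func newExampleTransformer(address string, mappings storage.Mappings, repository storage.Repository) storage.Transformer {
	return storage.Transformer{
		HashedAddress: utils.HexToKeccak256Hash(address),
		Mappings:      mappings,
		Repository:    repository,
	}
}
```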


@@ -20,7 +20,6 @@ import (
 	"github.com/ethereum/go-ethereum/common"
 	. "github.com/onsi/ginkgo"
 	. "github.com/onsi/gomega"
 	"github.com/vulcanize/vulcanizedb/libraries/shared/factories/storage"
 	"github.com/vulcanize/vulcanizedb/libraries/shared/mocks"
 	"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
@@ -38,21 +37,21 @@ var _ = Describe("Storage transformer", func() {
 		mappings = &mocks.MockMappings{}
 		repository = &mocks.MockStorageRepository{}
 		t = storage.Transformer{
-			Address: common.Address{},
+			HashedAddress: common.Hash{},
 			Mappings: mappings,
 			Repository: repository,
 		}
 	})

 	It("returns the contract address being watched", func() {
-		fakeAddress := common.HexToAddress("0x12345")
-		t.Address = fakeAddress
-		Expect(t.ContractAddress()).To(Equal(fakeAddress))
+		fakeAddress := utils.HexToKeccak256Hash("0x12345")
+		t.HashedAddress = fakeAddress
+		Expect(t.KeccakContractAddress()).To(Equal(fakeAddress))
 	})

 	It("looks up metadata for storage key", func() {
-		t.Execute(utils.StorageDiffRow{})
+		t.Execute(utils.StorageDiff{})
 		Expect(mappings.LookupCalled).To(BeTrue())
 	})
@@ -60,7 +59,7 @@ var _ = Describe("Storage transformer", func() {
 	It("returns error if lookup fails", func() {
 		mappings.LookupErr = fakes.FakeError
-		err := t.Execute(utils.StorageDiffRow{})
+		err := t.Execute(utils.StorageDiff{})
 		Expect(err).To(HaveOccurred())
 		Expect(err).To(MatchError(fakes.FakeError))
@@ -72,8 +71,8 @@ var _ = Describe("Storage transformer", func() {
 		rawValue := common.HexToAddress("0x12345")
 		fakeBlockNumber := 123
 		fakeBlockHash := "0x67890"
-		fakeRow := utils.StorageDiffRow{
-			Contract: common.Address{},
+		fakeRow := utils.StorageDiff{
+			HashedAddress: common.Hash{},
 			BlockHash: common.HexToHash(fakeBlockHash),
 			BlockHeight: fakeBlockNumber,
 			StorageKey: common.Hash{},
@@ -95,7 +94,7 @@ var _ = Describe("Storage transformer", func() {
 		mappings.Metadata = fakeMetadata
 		repository.CreateErr = fakes.FakeError
-		err := t.Execute(utils.StorageDiffRow{StorageValue: rawValue.Hash()})
+		err := t.Execute(utils.StorageDiff{StorageValue: rawValue.Hash()})
 		Expect(err).To(HaveOccurred())
 		Expect(err).To(MatchError(fakes.FakeError))
@@ -120,8 +119,8 @@ var _ = Describe("Storage transformer", func() {
 	It("passes the decoded data items to the repository", func() {
 		mappings.Metadata = fakeMetadata
-		fakeRow := utils.StorageDiffRow{
-			Contract: common.Address{},
+		fakeRow := utils.StorageDiff{
+			HashedAddress: common.Hash{},
 			BlockHash: common.HexToHash(fakeBlockHash),
 			BlockHeight: fakeBlockNumber,
 			StorageKey: common.Hash{},
@@ -144,7 +143,7 @@ var _ = Describe("Storage transformer", func() {
 		mappings.Metadata = fakeMetadata
 		repository.CreateErr = fakes.FakeError
-		err := t.Execute(utils.StorageDiffRow{StorageValue: rawValue.Hash()})
+		err := t.Execute(utils.StorageDiff{StorageValue: rawValue.Hash()})
 		Expect(err).To(HaveOccurred())
 		Expect(err).To(MatchError(fakes.FakeError))


@@ -17,18 +17,12 @@
 package fetcher

 import (
-	"strings"
-	log "github.com/sirupsen/logrus"
+	"github.com/sirupsen/logrus"
 	"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
 	"github.com/vulcanize/vulcanizedb/pkg/fs"
+	"strings"
 )

-type IStorageFetcher interface {
-	FetchStorageDiffs(chan<- utils.StorageDiffRow, chan<- error)
-}
-
 type CsvTailStorageFetcher struct {
 	tailer fs.Tailer
 }
@@ -37,18 +31,18 @@ func NewCsvTailStorageFetcher(tailer fs.Tailer) CsvTailStorageFetcher {
 	return CsvTailStorageFetcher{tailer: tailer}
 }

-func (storageFetcher CsvTailStorageFetcher) FetchStorageDiffs(out chan<- utils.StorageDiffRow, errs chan<- error) {
+func (storageFetcher CsvTailStorageFetcher) FetchStorageDiffs(out chan<- utils.StorageDiff, errs chan<- error) {
 	t, tailErr := storageFetcher.tailer.Tail()
 	if tailErr != nil {
 		errs <- tailErr
 	}
-	log.Debug("fetching storage diffs...")
+	logrus.Debug("fetching storage diffs...")
 	for line := range t.Lines {
-		row, parseErr := utils.FromStrings(strings.Split(line.Text, ","))
+		diff, parseErr := utils.FromParityCsvRow(strings.Split(line.Text, ","))
 		if parseErr != nil {
 			errs <- parseErr
 		} else {
-			out <- row
+			out <- diff
 		}
 	}
 }


@@ -18,30 +18,28 @@ package fetcher_test
 import (
 	"fmt"
-	"strings"
-	"time"
 	"github.com/ethereum/go-ethereum/common"
 	"github.com/hpcloud/tail"
 	. "github.com/onsi/ginkgo"
 	. "github.com/onsi/gomega"
 	"github.com/vulcanize/vulcanizedb/libraries/shared/fetcher"
 	"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
 	"github.com/vulcanize/vulcanizedb/pkg/fakes"
+	"strings"
+	"time"
 )

 var _ = Describe("Csv Tail Storage Fetcher", func() {
 	var (
 		errorsChannel chan error
 		mockTailer *fakes.MockTailer
-		rowsChannel chan utils.StorageDiffRow
+		diffsChannel chan utils.StorageDiff
 		storageFetcher fetcher.CsvTailStorageFetcher
 	)

 	BeforeEach(func() {
 		errorsChannel = make(chan error)
-		rowsChannel = make(chan utils.StorageDiffRow)
+		diffsChannel = make(chan utils.StorageDiff)
 		mockTailer = fakes.NewMockTailer()
 		storageFetcher = fetcher.NewCsvTailStorageFetcher(mockTailer)
 	})
@@ -49,7 +47,7 @@ var _ = Describe("Csv Tail Storage Fetcher", func() {
 	It("adds error to errors channel if tailing file fails", func(done Done) {
 		mockTailer.TailErr = fakes.FakeError

-		go storageFetcher.FetchStorageDiffs(rowsChannel, errorsChannel)
+		go storageFetcher.FetchStorageDiffs(diffsChannel, errorsChannel)

 		Expect(<-errorsChannel).To(MatchError(fakes.FakeError))
 		close(done)
@@ -58,24 +56,24 @@ var _ = Describe("Csv Tail Storage Fetcher", func() {
 	It("adds parsed csv row to rows channel for storage diff", func(done Done) {
 		line := getFakeLine()

-		go storageFetcher.FetchStorageDiffs(rowsChannel, errorsChannel)
+		go storageFetcher.FetchStorageDiffs(diffsChannel, errorsChannel)
 		mockTailer.Lines <- line

-		expectedRow, err := utils.FromStrings(strings.Split(line.Text, ","))
+		expectedRow, err := utils.FromParityCsvRow(strings.Split(line.Text, ","))
 		Expect(err).NotTo(HaveOccurred())
-		Expect(<-rowsChannel).To(Equal(expectedRow))
+		Expect(<-diffsChannel).To(Equal(expectedRow))
 		close(done)
 	})

 	It("adds error to errors channel if parsing csv fails", func(done Done) {
 		line := &tail.Line{Text: "invalid"}

-		go storageFetcher.FetchStorageDiffs(rowsChannel, errorsChannel)
+		go storageFetcher.FetchStorageDiffs(diffsChannel, errorsChannel)
 		mockTailer.Lines <- line

 		Expect(<-errorsChannel).To(HaveOccurred())
 		select {
-		case <-rowsChannel:
+		case <-diffsChannel:
 			Fail("value passed to rows channel on error")
 		default:
 			Succeed()


@@ -0,0 +1,81 @@
// Copyright 2019 Vulcanize
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package fetcher

import (
	"fmt"

	"github.com/ethereum/go-ethereum/rlp"
	"github.com/ethereum/go-ethereum/statediff"
	"github.com/sirupsen/logrus"

	"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
	"github.com/vulcanize/vulcanizedb/libraries/shared/streamer"
)

type GethRpcStorageFetcher struct {
	statediffPayloadChan chan statediff.Payload
	streamer             streamer.Streamer
}

func NewGethRpcStorageFetcher(streamer streamer.Streamer, statediffPayloadChan chan statediff.Payload) GethRpcStorageFetcher {
	return GethRpcStorageFetcher{
		statediffPayloadChan: statediffPayloadChan,
		streamer:             streamer,
	}
}

func (fetcher GethRpcStorageFetcher) FetchStorageDiffs(out chan<- utils.StorageDiff, errs chan<- error) {
	ethStatediffPayloadChan := fetcher.statediffPayloadChan
	clientSubscription, clientSubErr := fetcher.streamer.Stream(ethStatediffPayloadChan)
	if clientSubErr != nil {
		errs <- clientSubErr
		panic(fmt.Sprintf("Error creating a geth client subscription: %v", clientSubErr))
	}
	logrus.Info("Successfully created a geth client subscription: ", clientSubscription)

	for {
		diff := <-ethStatediffPayloadChan
		logrus.Trace("received a statediff")
		stateDiff := new(statediff.StateDiff)
		decodeErr := rlp.DecodeBytes(diff.StateDiffRlp, stateDiff)
		if decodeErr != nil {
			logrus.Warn("Error decoding state diff into RLP: ", decodeErr)
			errs <- decodeErr
		}

		accounts := getAccountsFromDiff(*stateDiff)
		logrus.Trace(fmt.Sprintf("iterating through %d accounts on stateDiff for block %d", len(accounts), stateDiff.BlockNumber))
		for _, account := range accounts {
			logrus.Trace(fmt.Sprintf("iterating through %d Storage values on account", len(account.Storage)))
			for _, storage := range account.Storage {
				diff, formatErr := utils.FromGethStateDiff(account, stateDiff, storage)
				logrus.Trace("adding storage diff to out channel",
					"keccak of address: ", diff.HashedAddress.Hex(),
					"block height: ", diff.BlockHeight,
					"storage key: ", diff.StorageKey.Hex(),
					"storage value: ", diff.StorageValue.Hex())
				if formatErr != nil {
					errs <- formatErr
				}

				out <- diff
			}
		}
	}
}

func getAccountsFromDiff(stateDiff statediff.StateDiff) []statediff.AccountDiff {
	accounts := append(stateDiff.CreatedAccounts, stateDiff.UpdatedAccounts...)
	return append(accounts, stateDiff.DeletedAccounts...)
}
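The statediff package is imported from go-ethereum, which go.mod above now pins to the vulcanize fork via a replace directive. A minimal sketch of driving this fetcher, assuming an already-constructed streamer.Streamer; the helper name runGethStorageFetcher is illustrative, not part of this PR:

```go
package example

import (
	"github.com/ethereum/go-ethereum/statediff"

	"github.com/vulcanize/vulcanizedb/libraries/shared/fetcher"
	"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
	"github.com/vulcanize/vulcanizedb/libraries/shared/streamer"
)

// runGethStorageFetcher subscribes to statediff payloads via the given streamer
// and exposes the resulting storage diffs and errors on channels, mirroring how
// the execute/composeAndExecute commands drive the fetcher.
func runGethStorageFetcher(s streamer.Streamer) (<-chan utils.StorageDiff, <-chan error) {
	payloadChan := make(chan statediff.Payload)
	storageFetcher := fetcher.NewGethRpcStorageFetcher(s, payloadChan)

	diffs := make(chan utils.StorageDiff)
	errs := make(chan error)
	go storageFetcher.FetchStorageDiffs(diffs, errs)
	return diffs, errs
}
```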


@@ -0,0 +1,172 @@
// Copyright 2019 Vulcanize
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package fetcher_test

import (
	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/rlp"
	"github.com/ethereum/go-ethereum/rpc"
	"github.com/ethereum/go-ethereum/statediff"
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
	"github.com/vulcanize/vulcanizedb/libraries/shared/fetcher"
	"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
	"github.com/vulcanize/vulcanizedb/libraries/shared/test_data"
	"github.com/vulcanize/vulcanizedb/pkg/fakes"
)

type MockStoragediffStreamer struct {
	subscribeError    error
	PassedPayloadChan chan statediff.Payload
	streamPayloads    []statediff.Payload
}

func (streamer *MockStoragediffStreamer) Stream(statediffPayloadChan chan statediff.Payload) (*rpc.ClientSubscription, error) {
	clientSubscription := rpc.ClientSubscription{}
	streamer.PassedPayloadChan = statediffPayloadChan

	go func() {
		for _, payload := range streamer.streamPayloads {
			streamer.PassedPayloadChan <- payload
		}
	}()

	return &clientSubscription, streamer.subscribeError
}

func (streamer *MockStoragediffStreamer) SetSubscribeError(err error) {
	streamer.subscribeError = err
}

func (streamer *MockStoragediffStreamer) SetPayloads(payloads []statediff.Payload) {
	streamer.streamPayloads = payloads
}

var _ = Describe("Geth RPC Storage Fetcher", func() {
	var streamer MockStoragediffStreamer
	var statediffPayloadChan chan statediff.Payload
	var statediffFetcher fetcher.GethRpcStorageFetcher
	var storagediffChan chan utils.StorageDiff
	var errorChan chan error

	BeforeEach(func() {
		streamer = MockStoragediffStreamer{}
		statediffPayloadChan = make(chan statediff.Payload, 1)
		statediffFetcher = fetcher.NewGethRpcStorageFetcher(&streamer, statediffPayloadChan)
		storagediffChan = make(chan utils.StorageDiff)
		errorChan = make(chan error)
	})

	It("adds errors to error channel if the RPC subscription fails and panics", func(done Done) {
		streamer.SetSubscribeError(fakes.FakeError)

		go func() {
			failedSub := func() {
				statediffFetcher.FetchStorageDiffs(storagediffChan, errorChan)
			}
			Expect(failedSub).To(Panic())
		}()

		Expect(<-errorChan).To(MatchError(fakes.FakeError))
		close(done)
	})

	It("streams StatediffPayloads from a Geth RPC subscription", func(done Done) {
		streamer.SetPayloads([]statediff.Payload{test_data.MockStatediffPayload})

		go statediffFetcher.FetchStorageDiffs(storagediffChan, errorChan)

		streamedPayload := <-statediffPayloadChan
		Expect(streamedPayload).To(Equal(test_data.MockStatediffPayload))
		Expect(streamer.PassedPayloadChan).To(Equal(statediffPayloadChan))
		close(done)
	})

	It("adds errors to error channel if decoding the state diff RLP fails", func(done Done) {
		badStatediffPayload := statediff.Payload{}
		streamer.SetPayloads([]statediff.Payload{badStatediffPayload})

		go statediffFetcher.FetchStorageDiffs(storagediffChan, errorChan)

		Expect(<-errorChan).To(MatchError("EOF"))
		close(done)
	})

	It("adds parsed statediff payloads to the rows channel", func(done Done) {
		streamer.SetPayloads([]statediff.Payload{test_data.MockStatediffPayload})

		go statediffFetcher.FetchStorageDiffs(storagediffChan, errorChan)

		height := test_data.BlockNumber
		intHeight := int(height.Int64())
		createdExpectedStorageDiff := utils.StorageDiff{
			HashedAddress: common.BytesToHash(test_data.ContractLeafKey[:]),
			BlockHash:     common.HexToHash("0xfa40fbe2d98d98b3363a778d52f2bcd29d6790b9b3f3cab2b167fd12d3550f73"),
			BlockHeight:   intHeight,
			StorageKey:    common.BytesToHash(test_data.StorageKey),
			StorageValue:  common.BytesToHash(test_data.SmallStorageValue),
		}
		updatedExpectedStorageDiff := utils.StorageDiff{
			HashedAddress: common.BytesToHash(test_data.AnotherContractLeafKey[:]),
			BlockHash:     common.HexToHash("0xfa40fbe2d98d98b3363a778d52f2bcd29d6790b9b3f3cab2b167fd12d3550f73"),
			BlockHeight:   intHeight,
			StorageKey:    common.BytesToHash(test_data.StorageKey),
			StorageValue:  common.BytesToHash(test_data.LargeStorageValue),
		}
		deletedExpectedStorageDiff := utils.StorageDiff{
			HashedAddress: common.BytesToHash(test_data.AnotherContractLeafKey[:]),
			BlockHash:     common.HexToHash("0xfa40fbe2d98d98b3363a778d52f2bcd29d6790b9b3f3cab2b167fd12d3550f73"),
			BlockHeight:   intHeight,
			StorageKey:    common.BytesToHash(test_data.StorageKey),
			StorageValue:  common.BytesToHash(test_data.SmallStorageValue),
		}

		createdStateDiff := <-storagediffChan
		updatedStateDiff := <-storagediffChan
		deletedStateDiff := <-storagediffChan

		Expect(createdStateDiff).To(Equal(createdExpectedStorageDiff))
		Expect(updatedStateDiff).To(Equal(updatedExpectedStorageDiff))
		Expect(deletedStateDiff).To(Equal(deletedExpectedStorageDiff))
		close(done)
	})

	It("adds errors to error channel if formatting the diff as a StateDiff object fails", func(done Done) {
		accountDiffs := test_data.CreatedAccountDiffs
		accountDiffs[0].Storage = []statediff.StorageDiff{test_data.StorageWithBadValue}

		stateDiff := statediff.StateDiff{
			BlockNumber:     test_data.BlockNumber,
			BlockHash:       common.HexToHash(test_data.BlockHash),
			CreatedAccounts: accountDiffs,
		}

		stateDiffRlp, err := rlp.EncodeToBytes(stateDiff)
		Expect(err).NotTo(HaveOccurred())

		badStatediffPayload := statediff.Payload{
			StateDiffRlp: stateDiffRlp,
		}
		streamer.SetPayloads([]statediff.Payload{badStatediffPayload})

		go statediffFetcher.FetchStorageDiffs(storagediffChan, errorChan)

		Expect(<-errorChan).To(MatchError("rlp: input contains more than one value"))
		close(done)
	})
})


@@ -0,0 +1,21 @@
// Copyright 2018 Vulcanize
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package fetcher

import "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"

type IStorageFetcher interface {
	FetchStorageDiffs(out chan<- utils.StorageDiff, errs chan<- error)
}
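Any type with this one method can feed the storage watcher. As a toy illustration of the contract (diffs go to out, failures to errs), assuming nothing beyond the interface itself; the mock in the next file does essentially the same thing:

```go
package example

import "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"

// sliceStorageFetcher is a hypothetical IStorageFetcher that replays a fixed
// set of diffs onto the out channel; real implementations stream indefinitely.
type sliceStorageFetcher struct {
	diffs []utils.StorageDiff
}

func (f sliceStorageFetcher) FetchStorageDiffs(out chan<- utils.StorageDiff, errs chan<- error) {
	for _, diff := range f.diffs {
		out <- diff
	}
}
```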


@@ -19,7 +19,7 @@ package mocks
 import "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"

 type MockStorageFetcher struct {
-	RowsToReturn []utils.StorageDiffRow
+	DiffsToReturn []utils.StorageDiff
 	ErrsToReturn []error
 }
@@ -27,13 +27,13 @@ func NewMockStorageFetcher() *MockStorageFetcher {
 	return &MockStorageFetcher{}
 }

-func (fetcher *MockStorageFetcher) FetchStorageDiffs(out chan<- utils.StorageDiffRow, errs chan<- error) {
+func (fetcher *MockStorageFetcher) FetchStorageDiffs(out chan<- utils.StorageDiff, errs chan<- error) {
 	defer close(out)
 	defer close(errs)
 	for _, err := range fetcher.ErrsToReturn {
 		errs <- err
 	}
-	for _, row := range fetcher.RowsToReturn {
-		out <- row
+	for _, diff := range fetcher.DiffsToReturn {
+		out <- diff
 	}
 }


@@ -23,16 +23,16 @@ import (
 type MockStorageQueue struct {
 	AddCalled bool
 	AddError error
-	AddPassedRow utils.StorageDiffRow
+	AddPassedDiff utils.StorageDiff
 	DeleteErr error
 	DeletePassedId int
 	GetAllErr error
-	RowsToReturn []utils.StorageDiffRow
+	DiffsToReturn []utils.StorageDiff
 }

-func (queue *MockStorageQueue) Add(row utils.StorageDiffRow) error {
+func (queue *MockStorageQueue) Add(diff utils.StorageDiff) error {
 	queue.AddCalled = true
-	queue.AddPassedRow = row
+	queue.AddPassedDiff = diff
 	return queue.AddError
 }
@@ -41,6 +41,6 @@ func (queue *MockStorageQueue) Delete(id int) error {
 	return queue.DeleteErr
 }

-func (queue *MockStorageQueue) GetAll() ([]utils.StorageDiffRow, error) {
-	return queue.RowsToReturn, queue.GetAllErr
+func (queue *MockStorageQueue) GetAll() ([]utils.StorageDiff, error) {
+	return queue.DiffsToReturn, queue.GetAllErr
 }


@@ -18,25 +18,24 @@ package mocks
 import (
 	"github.com/ethereum/go-ethereum/common"
 	"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
 	"github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
 	"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres"
 )

 type MockStorageTransformer struct {
-	Address common.Address
+	KeccakOfAddress common.Hash
 	ExecuteErr error
-	PassedRow utils.StorageDiffRow
+	PassedDiff utils.StorageDiff
 }

-func (transformer *MockStorageTransformer) Execute(row utils.StorageDiffRow) error {
-	transformer.PassedRow = row
+func (transformer *MockStorageTransformer) Execute(diff utils.StorageDiff) error {
+	transformer.PassedDiff = diff
 	return transformer.ExecuteErr
 }

-func (transformer *MockStorageTransformer) ContractAddress() common.Address {
-	return transformer.Address
+func (transformer *MockStorageTransformer) KeccakContractAddress() common.Hash {
+	return transformer.KeccakOfAddress
 }

 func (transformer *MockStorageTransformer) FakeTransformerInitializer(db *postgres.DB) transformer.StorageTransformer {


@@ -46,16 +46,29 @@ const (
 	IndexEleven = "000000000000000000000000000000000000000000000000000000000000000b"
 )

+func AddHashedKeys(currentMappings map[common.Hash]utils.StorageValueMetadata) map[common.Hash]utils.StorageValueMetadata {
+	copyOfCurrentMappings := make(map[common.Hash]utils.StorageValueMetadata)
+	for k, v := range currentMappings {
+		copyOfCurrentMappings[k] = v
+	}
+	for k, v := range copyOfCurrentMappings {
+		currentMappings[hashKey(k)] = v
+	}
+	return currentMappings
+}
+
+func hashKey(key common.Hash) common.Hash {
+	return crypto.Keccak256Hash(key.Bytes())
+}
+
 func GetMapping(indexOnContract, key string) common.Hash {
 	keyBytes := common.FromHex(key + indexOnContract)
-	encoded := crypto.Keccak256(keyBytes)
-	return common.BytesToHash(encoded)
+	return crypto.Keccak256Hash(keyBytes)
 }

 func GetNestedMapping(indexOnContract, primaryKey, secondaryKey string) common.Hash {
 	primaryMappingIndex := crypto.Keccak256(common.FromHex(primaryKey + indexOnContract))
-	secondaryMappingIndex := crypto.Keccak256(common.FromHex(secondaryKey), primaryMappingIndex)
-	return common.BytesToHash(secondaryMappingIndex)
+	return crypto.Keccak256Hash(common.FromHex(secondaryKey), primaryMappingIndex)
 }

 func GetIncrementedKey(original common.Hash, incrementBy int64) common.Hash {
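The hashed keys exist presumably because geth's statediff output identifies storage slots by the keccak-256 hash of the slot key, so lookups need to succeed on either form. A small sketch reproducing the expected hash used in the test below:

```go
package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/crypto"
)

func main() {
	// Raw storage slot key from the AddHashedKeys test.
	rawKey := common.HexToHash("72c72de6b203d67cb6cd54fc93300109fcc6fd6eac88e390271a3d548794d800")
	// hashKey above is crypto.Keccak256Hash over the key bytes.
	hashedKey := crypto.Keccak256Hash(rawKey.Bytes())
	fmt.Println(hashedKey.Hex())
	// Expected per the test:
	// 0x2165edb4e1c37b99b60fa510d84f939dd35d5cd1d1c8f299d6456ea09df65a76
}
```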


@@ -5,9 +5,31 @@ import (
 	. "github.com/onsi/ginkgo"
 	. "github.com/onsi/gomega"
 	"github.com/vulcanize/vulcanizedb/libraries/shared/storage"
+	"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
 )

 var _ = Describe("Mappings", func() {
+	Describe("AddHashedKeys", func() {
+		It("returns a copy of the map with an additional slot for the hashed version of every key", func() {
+			fakeMap := map[common.Hash]utils.StorageValueMetadata{}
+			fakeStorageKey := common.HexToHash("72c72de6b203d67cb6cd54fc93300109fcc6fd6eac88e390271a3d548794d800")
+			var fakeMappingKey utils.Key = "fakeKey"
+			fakeMetadata := utils.StorageValueMetadata{
+				Name: "fakeName",
+				Keys: map[utils.Key]string{fakeMappingKey: "fakeValue"},
+				Type: utils.Uint48,
+			}
+			fakeMap[fakeStorageKey] = fakeMetadata
+
+			result := storage.AddHashedKeys(fakeMap)
+
+			Expect(len(result)).To(Equal(2))
+			expectedHashedStorageKey := common.HexToHash("2165edb4e1c37b99b60fa510d84f939dd35d5cd1d1c8f299d6456ea09df65a76")
+			Expect(fakeMap[fakeStorageKey]).To(Equal(fakeMetadata))
+			Expect(fakeMap[expectedHashedStorageKey]).To(Equal(fakeMetadata))
+		})
+	})
+
 	Describe("GetMapping", func() {
 		It("returns the storage key for a mapping when passed the mapping's index on the contract and the desired value's key", func() {
 			// ex. solidity:


@@ -22,9 +22,9 @@ import (
 )

 type IStorageQueue interface {
-	Add(row utils.StorageDiffRow) error
+	Add(diff utils.StorageDiff) error
 	Delete(id int) error
-	GetAll() ([]utils.StorageDiffRow, error)
+	GetAll() ([]utils.StorageDiff, error)
 }

 type StorageQueue struct {
@@ -35,11 +35,11 @@ func NewStorageQueue(db *postgres.DB) StorageQueue {
 	return StorageQueue{db: db}
 }

-func (queue StorageQueue) Add(row utils.StorageDiffRow) error {
+func (queue StorageQueue) Add(diff utils.StorageDiff) error {
 	_, err := queue.db.Exec(`INSERT INTO public.queued_storage (contract,
 		block_hash, block_height, storage_key, storage_value) VALUES
-		($1, $2, $3, $4, $5) ON CONFLICT DO NOTHING`, row.Contract.Bytes(), row.BlockHash.Bytes(),
-		row.BlockHeight, row.StorageKey.Bytes(), row.StorageValue.Bytes())
+		($1, $2, $3, $4, $5) ON CONFLICT DO NOTHING`, diff.HashedAddress.Bytes(), diff.BlockHash.Bytes(),
+		diff.BlockHeight, diff.StorageKey.Bytes(), diff.StorageValue.Bytes())
 	return err
 }
@@ -48,8 +48,8 @@ func (queue StorageQueue) Delete(id int) error {
 	return err
 }

-func (queue StorageQueue) GetAll() ([]utils.StorageDiffRow, error) {
-	var result []utils.StorageDiffRow
+func (queue StorageQueue) GetAll() ([]utils.StorageDiff, error) {
+	var result []utils.StorageDiff
 	err := queue.db.Select(&result, `SELECT * FROM public.queued_storage`)
 	return result, err
 }
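Queued diffs can presumably be retried later; the commands above poll on queueRecheckInterval. A hedged usage sketch of the renamed API; requeueExample and its error handling are illustrative, not part of this PR:

```go
package example

import (
	"github.com/vulcanize/vulcanizedb/libraries/shared/storage"
	"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
	"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres"
)

// requeueExample persists a diff that could not be transformed yet, then reads
// the backlog and clears each entry once it has been handled.
func requeueExample(db *postgres.DB, diff utils.StorageDiff) error {
	queue := storage.NewStorageQueue(db)
	if err := queue.Add(diff); err != nil {
		return err
	}
	queued, err := queue.GetAll()
	if err != nil {
		return err
	}
	for _, pending := range queued {
		// ... attempt to transform pending here ...
		if deleteErr := queue.Delete(pending.Id); deleteErr != nil {
			return deleteErr
		}
	}
	return nil
}
```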


@@ -20,7 +20,6 @@ import (
 	"github.com/ethereum/go-ethereum/common"
 	. "github.com/onsi/ginkgo"
 	. "github.com/onsi/gomega"
 	"github.com/vulcanize/vulcanizedb/libraries/shared/storage"
 	"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
 	"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres"
@@ -30,13 +29,14 @@ import (
 var _ = Describe("Storage queue", func() {
 	var (
 		db *postgres.DB
-		row utils.StorageDiffRow
+		diff utils.StorageDiff
 		queue storage.IStorageQueue
 	)

 	BeforeEach(func() {
-		row = utils.StorageDiffRow{
-			Contract: common.HexToAddress("0x123456"),
+		fakeAddr := "0x123456"
+		diff = utils.StorageDiff{
+			HashedAddress: utils.HexToKeccak256Hash(fakeAddr),
 			BlockHash: common.HexToHash("0x678901"),
 			BlockHeight: 987,
 			StorageKey: common.HexToHash("0x654321"),
@@ -45,20 +45,20 @@ var _ = Describe("Storage queue", func() {
 		db = test_config.NewTestDB(test_config.NewTestNode())
 		test_config.CleanTestDB(db)
 		queue = storage.NewStorageQueue(db)
-		addErr := queue.Add(row)
+		addErr := queue.Add(diff)
 		Expect(addErr).NotTo(HaveOccurred())
 	})

 	Describe("Add", func() {
-		It("adds a storage row to the db", func() {
-			var result utils.StorageDiffRow
+		It("adds a storage diff to the db", func() {
+			var result utils.StorageDiff
 			getErr := db.Get(&result, `SELECT contract, block_hash, block_height, storage_key, storage_value FROM public.queued_storage`)
 			Expect(getErr).NotTo(HaveOccurred())
-			Expect(result).To(Equal(row))
+			Expect(result).To(Equal(diff))
 		})

-		It("does not duplicate storage rows", func() {
-			addErr := queue.Add(row)
+		It("does not duplicate storage diffs", func() {
+			addErr := queue.Add(diff)
 			Expect(addErr).NotTo(HaveOccurred())
 			var count int
 			getErr := db.Get(&count, `SELECT count(*) FROM public.queued_storage`)
@@ -67,12 +67,12 @@ var _ = Describe("Storage queue", func() {
 		})
 	})

-	It("deletes storage row from db", func() {
-		rows, getErr := queue.GetAll()
+	It("deletes storage diff from db", func() {
+		diffs, getErr := queue.GetAll()
 		Expect(getErr).NotTo(HaveOccurred())
-		Expect(len(rows)).To(Equal(1))
+		Expect(len(diffs)).To(Equal(1))

-		err := queue.Delete(rows[0].Id)
+		err := queue.Delete(diffs[0].Id)
 		Expect(err).NotTo(HaveOccurred())
 		remainingRows, secondGetErr := queue.GetAll()
@@ -80,33 +80,34 @@ var _ = Describe("Storage queue", func() {
 		Expect(len(remainingRows)).To(BeZero())
 	})

-	It("gets all storage rows from db", func() {
-		rowTwo := utils.StorageDiffRow{
-			Contract: common.HexToAddress("0x123456"),
+	It("gets all storage diffs from db", func() {
+		fakeAddr := "0x234567"
+		diffTwo := utils.StorageDiff{
+			HashedAddress: utils.HexToKeccak256Hash(fakeAddr),
 			BlockHash: common.HexToHash("0x678902"),
 			BlockHeight: 988,
 			StorageKey: common.HexToHash("0x654322"),
 			StorageValue: common.HexToHash("0x198766"),
 		}
-		addErr := queue.Add(rowTwo)
+		addErr := queue.Add(diffTwo)
 		Expect(addErr).NotTo(HaveOccurred())

-		rows, err := queue.GetAll()
+		diffs, err := queue.GetAll()
 		Expect(err).NotTo(HaveOccurred())
-		Expect(len(rows)).To(Equal(2))
-		Expect(rows[0]).NotTo(Equal(rows[1]))
-		Expect(rows[0].Id).NotTo(BeZero())
-		Expect(rows[0].Contract).To(Or(Equal(row.Contract), Equal(rowTwo.Contract)))
-		Expect(rows[0].BlockHash).To(Or(Equal(row.BlockHash), Equal(rowTwo.BlockHash)))
-		Expect(rows[0].BlockHeight).To(Or(Equal(row.BlockHeight), Equal(rowTwo.BlockHeight)))
-		Expect(rows[0].StorageKey).To(Or(Equal(row.StorageKey), Equal(rowTwo.StorageKey)))
-		Expect(rows[0].StorageValue).To(Or(Equal(row.StorageValue), Equal(rowTwo.StorageValue)))
-		Expect(rows[1].Id).NotTo(BeZero())
-		Expect(rows[1].Contract).To(Or(Equal(row.Contract), Equal(rowTwo.Contract)))
-		Expect(rows[1].BlockHash).To(Or(Equal(row.BlockHash), Equal(rowTwo.BlockHash)))
-		Expect(rows[1].BlockHeight).To(Or(Equal(row.BlockHeight), Equal(rowTwo.BlockHeight)))
-		Expect(rows[1].StorageKey).To(Or(Equal(row.StorageKey), Equal(rowTwo.StorageKey)))
-		Expect(rows[1].StorageValue).To(Or(Equal(row.StorageValue), Equal(rowTwo.StorageValue)))
+		Expect(len(diffs)).To(Equal(2))
+		Expect(diffs[0]).NotTo(Equal(diffs[1]))
+		Expect(diffs[0].Id).NotTo(BeZero())
+		Expect(diffs[0].HashedAddress).To(Or(Equal(diff.HashedAddress), Equal(diffTwo.HashedAddress)))
+		Expect(diffs[0].BlockHash).To(Or(Equal(diff.BlockHash), Equal(diffTwo.BlockHash)))
+		Expect(diffs[0].BlockHeight).To(Or(Equal(diff.BlockHeight), Equal(diffTwo.BlockHeight)))
+		Expect(diffs[0].StorageKey).To(Or(Equal(diff.StorageKey), Equal(diffTwo.StorageKey)))
+		Expect(diffs[0].StorageValue).To(Or(Equal(diff.StorageValue), Equal(diffTwo.StorageValue)))
+		Expect(diffs[1].Id).NotTo(BeZero())
+		Expect(diffs[1].HashedAddress).To(Or(Equal(diff.HashedAddress), Equal(diffTwo.HashedAddress)))
+		Expect(diffs[1].BlockHash).To(Or(Equal(diff.BlockHash), Equal(diffTwo.BlockHash)))
+		Expect(diffs[1].BlockHeight).To(Or(Equal(diff.BlockHeight), Equal(diffTwo.BlockHeight)))
+		Expect(diffs[1].StorageKey).To(Or(Equal(diff.StorageKey), Equal(diffTwo.StorageKey)))
+		Expect(diffs[1].StorageValue).To(Or(Equal(diff.StorageValue), Equal(diffTwo.StorageValue)))
 	})
 })


@@ -27,20 +27,20 @@ const (
 	bitsPerByte = 8
 )

-func Decode(row StorageDiffRow, metadata StorageValueMetadata) (interface{}, error) {
+func Decode(diff StorageDiff, metadata StorageValueMetadata) (interface{}, error) {
 	switch metadata.Type {
 	case Uint256:
-		return decodeInteger(row.StorageValue.Bytes()), nil
+		return decodeInteger(diff.StorageValue.Bytes()), nil
 	case Uint48:
-		return decodeInteger(row.StorageValue.Bytes()), nil
+		return decodeInteger(diff.StorageValue.Bytes()), nil
 	case Uint128:
-		return decodeInteger(row.StorageValue.Bytes()), nil
+		return decodeInteger(diff.StorageValue.Bytes()), nil
 	case Address:
-		return decodeAddress(row.StorageValue.Bytes()), nil
+		return decodeAddress(diff.StorageValue.Bytes()), nil
 	case Bytes32:
-		return row.StorageValue.Hex(), nil
+		return diff.StorageValue.Hex(), nil
 	case PackedSlot:
-		return decodePackedSlot(row.StorageValue.Bytes(), metadata.PackedTypes), nil
+		return decodePackedSlot(diff.StorageValue.Bytes(), metadata.PackedTypes), nil
 	default:
 		panic(fmt.Sprintf("can't decode unknown type: %d", metadata.Type))
 	}


@@ -29,10 +29,10 @@ import (
 var _ = Describe("Storage decoder", func() {
 	It("decodes uint256", func() {
 		fakeInt := common.HexToHash("0000000000000000000000000000000000000000000000000000000000000539")
-		row := utils.StorageDiffRow{StorageValue: fakeInt}
+		diff := utils.StorageDiff{StorageValue: fakeInt}
 		metadata := utils.StorageValueMetadata{Type: utils.Uint256}

-		result, err := utils.Decode(row, metadata)
+		result, err := utils.Decode(diff, metadata)
 		Expect(err).NotTo(HaveOccurred())
 		Expect(result).To(Equal(big.NewInt(0).SetBytes(fakeInt.Bytes()).String()))
@@ -40,10 +40,10 @@ var _ = Describe("Storage decoder", func() {
 	It("decodes uint128", func() {
 		fakeInt := common.HexToHash("0000000000000000000000000000000000000000000000000000000000011123")
-		row := utils.StorageDiffRow{StorageValue: fakeInt}
+		diff := utils.StorageDiff{StorageValue: fakeInt}
 		metadata := utils.StorageValueMetadata{Type: utils.Uint128}

-		result, err := utils.Decode(row, metadata)
+		result, err := utils.Decode(diff, metadata)
 		Expect(err).NotTo(HaveOccurred())
 		Expect(result).To(Equal(big.NewInt(0).SetBytes(fakeInt.Bytes()).String()))
@@ -51,10 +51,10 @@ var _ = Describe("Storage decoder", func() {
 	It("decodes uint48", func() {
 		fakeInt := common.HexToHash("0000000000000000000000000000000000000000000000000000000000000123")
-		row := utils.StorageDiffRow{StorageValue: fakeInt}
+		diff := utils.StorageDiff{StorageValue: fakeInt}
 		metadata := utils.StorageValueMetadata{Type: utils.Uint48}

-		result, err := utils.Decode(row, metadata)
+		result, err := utils.Decode(diff, metadata)
 		Expect(err).NotTo(HaveOccurred())
 		Expect(result).To(Equal(big.NewInt(0).SetBytes(fakeInt.Bytes()).String()))
@@ -62,10 +62,10 @@ var _ = Describe("Storage decoder", func() {
 	It("decodes address", func() {
 		fakeAddress := common.HexToAddress("0x12345")
-		row := utils.StorageDiffRow{StorageValue: fakeAddress.Hash()}
+		diff := utils.StorageDiff{StorageValue: fakeAddress.Hash()}
 		metadata := utils.StorageValueMetadata{Type: utils.Address}

-		result, err := utils.Decode(row, metadata)
+		result, err := utils.Decode(diff, metadata)
 		Expect(err).NotTo(HaveOccurred())
 		Expect(result).To(Equal(fakeAddress.Hex()))
@@ -75,7 +75,7 @@ var _ = Describe("Storage decoder", func() {
 		It("decodes uint48 items", func() {
 			//this is a real storage data example
 			packedStorage := common.HexToHash("000000000000000000000000000000000000000000000002a300000000002a30")
-			row := utils.StorageDiffRow{StorageValue: packedStorage}
+			diff := utils.StorageDiff{StorageValue: packedStorage}
 			packedTypes := map[int]utils.ValueType{}
 			packedTypes[0] = utils.Uint48
packedTypes[1] = utils.Uint48 packedTypes[1] = utils.Uint48
@ -85,7 +85,7 @@ var _ = Describe("Storage decoder", func() {
PackedTypes: packedTypes, PackedTypes: packedTypes,
} }
result, err := utils.Decode(row, metadata) result, err := utils.Decode(diff, metadata)
decodedValues := result.(map[int]string) decodedValues := result.(map[int]string)
Expect(err).NotTo(HaveOccurred()) Expect(err).NotTo(HaveOccurred())
@ -99,7 +99,7 @@ var _ = Describe("Storage decoder", func() {
packedStorageHex := "0000000A5D1AFFFFFFFFFFFE00000009F3C600000002A300000000002A30" packedStorageHex := "0000000A5D1AFFFFFFFFFFFE00000009F3C600000002A300000000002A30"
packedStorage := common.HexToHash(packedStorageHex) packedStorage := common.HexToHash(packedStorageHex)
row := utils.StorageDiffRow{StorageValue: packedStorage} diff := utils.StorageDiff{StorageValue: packedStorage}
packedTypes := map[int]utils.ValueType{} packedTypes := map[int]utils.ValueType{}
packedTypes[0] = utils.Uint48 packedTypes[0] = utils.Uint48
packedTypes[1] = utils.Uint48 packedTypes[1] = utils.Uint48
@ -112,7 +112,7 @@ var _ = Describe("Storage decoder", func() {
PackedTypes: packedTypes, PackedTypes: packedTypes,
} }
result, err := utils.Decode(row, metadata) result, err := utils.Decode(diff, metadata)
decodedValues := result.(map[int]string) decodedValues := result.(map[int]string)
Expect(err).NotTo(HaveOccurred()) Expect(err).NotTo(HaveOccurred())
@ -129,7 +129,7 @@ var _ = Describe("Storage decoder", func() {
packedStorageHex := "000000038D7EA4C67FF8E502B6730000" + packedStorageHex := "000000038D7EA4C67FF8E502B6730000" +
"0000000000000000AB54A98CEB1F0AD2" "0000000000000000AB54A98CEB1F0AD2"
packedStorage := common.HexToHash(packedStorageHex) packedStorage := common.HexToHash(packedStorageHex)
row := utils.StorageDiffRow{StorageValue: packedStorage} diff := utils.StorageDiff{StorageValue: packedStorage}
packedTypes := map[int]utils.ValueType{} packedTypes := map[int]utils.ValueType{}
packedTypes[0] = utils.Uint128 packedTypes[0] = utils.Uint128
packedTypes[1] = utils.Uint128 packedTypes[1] = utils.Uint128
@ -139,7 +139,7 @@ var _ = Describe("Storage decoder", func() {
PackedTypes: packedTypes, PackedTypes: packedTypes,
} }
result, err := utils.Decode(row, metadata) result, err := utils.Decode(diff, metadata)
decodedValues := result.(map[int]string) decodedValues := result.(map[int]string)
Expect(err).NotTo(HaveOccurred()) Expect(err).NotTo(HaveOccurred())
@ -151,7 +151,7 @@ var _ = Describe("Storage decoder", func() {
//TODO: replace with real data when available //TODO: replace with real data when available
addressHex := "0000000000000000000000000000000000012345" addressHex := "0000000000000000000000000000000000012345"
packedStorage := common.HexToHash("00000002a300" + "000000002a30" + addressHex) packedStorage := common.HexToHash("00000002a300" + "000000002a30" + addressHex)
row := utils.StorageDiffRow{StorageValue: packedStorage} row := utils.StorageDiff{StorageValue: packedStorage}
packedTypes := map[int]utils.ValueType{} packedTypes := map[int]utils.ValueType{}
packedTypes[0] = utils.Address packedTypes[0] = utils.Address
packedTypes[1] = utils.Uint48 packedTypes[1] = utils.Uint48

View File

@ -0,0 +1,73 @@
// VulcanizeDB
// Copyright © 2019 Vulcanize
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package utils
import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/rlp"
"github.com/ethereum/go-ethereum/statediff"
"strconv"
)
const ExpectedRowLength = 5
type StorageDiff struct {
Id int
HashedAddress common.Hash `db:"contract"`
BlockHash common.Hash `db:"block_hash"`
BlockHeight int `db:"block_height"`
StorageKey common.Hash `db:"storage_key"`
StorageValue common.Hash `db:"storage_value"`
}
func FromParityCsvRow(csvRow []string) (StorageDiff, error) {
if len(csvRow) != ExpectedRowLength {
return StorageDiff{}, ErrRowMalformed{Length: len(csvRow)}
}
height, err := strconv.Atoi(csvRow[2])
if err != nil {
return StorageDiff{}, err
}
return StorageDiff{
HashedAddress: HexToKeccak256Hash(csvRow[0]),
BlockHash: common.HexToHash(csvRow[1]),
BlockHeight: height,
StorageKey: common.HexToHash(csvRow[3]),
StorageValue: common.HexToHash(csvRow[4]),
}, nil
}
func FromGethStateDiff(account statediff.AccountDiff, stateDiff *statediff.StateDiff, storage statediff.StorageDiff) (StorageDiff, error) {
var decodedValue []byte
err := rlp.DecodeBytes(storage.Value, &decodedValue)
if err != nil {
return StorageDiff{}, err
}
return StorageDiff{
HashedAddress: common.BytesToHash(account.Key),
BlockHash: stateDiff.BlockHash,
BlockHeight: int(stateDiff.BlockNumber.Int64()),
StorageKey: common.BytesToHash(storage.Key),
StorageValue: common.BytesToHash(decodedValue),
}, nil
}
func HexToKeccak256Hash(hex string) common.Hash {
return crypto.Keccak256Hash(common.FromHex(hex))
}
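
A minimal sketch, assuming a helper name (collectStorageDiffs) that is not part of this commit, of how FromGethStateDiff might be applied to every storage change in a decoded statediff.StateDiff; flattening the created, updated, and deleted accounts mirrors the fields used by the mock data later in this commit.

package example

import (
	"github.com/ethereum/go-ethereum/statediff"
	"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
)

// collectStorageDiffs (hypothetical) walks every account in a state diff and
// converts each raw storage change into the new StorageDiff struct.
func collectStorageDiffs(stateDiff *statediff.StateDiff) ([]utils.StorageDiff, error) {
	accounts := make([]statediff.AccountDiff, 0,
		len(stateDiff.CreatedAccounts)+len(stateDiff.UpdatedAccounts)+len(stateDiff.DeletedAccounts))
	accounts = append(accounts, stateDiff.CreatedAccounts...)
	accounts = append(accounts, stateDiff.UpdatedAccounts...)
	accounts = append(accounts, stateDiff.DeletedAccounts...)
	var results []utils.StorageDiff
	for _, account := range accounts {
		for _, storage := range account.Storage {
			diff, formatErr := utils.FromGethStateDiff(account, stateDiff, storage)
			if formatErr != nil {
				return nil, formatErr
			}
			results = append(results, diff)
		}
	}
	return results, nil
}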

View File

@ -0,0 +1,121 @@
// VulcanizeDB
// Copyright © 2019 Vulcanize
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package utils_test
import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/rlp"
"github.com/ethereum/go-ethereum/statediff"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
"github.com/vulcanize/vulcanizedb/libraries/shared/test_data"
"github.com/vulcanize/vulcanizedb/pkg/fakes"
"math/big"
"math/rand"
)
var _ = Describe("Storage row parsing", func() {
Describe("FromParityCsvRow", func() {
It("converts an array of strings to a row struct", func() {
contract := "0x123"
blockHash := "0x456"
blockHeight := "789"
storageKey := "0x987"
storageValue := "0x654"
data := []string{contract, blockHash, blockHeight, storageKey, storageValue}
result, err := utils.FromParityCsvRow(data)
Expect(err).NotTo(HaveOccurred())
expectedKeccakOfContractAddress := utils.HexToKeccak256Hash(contract)
Expect(result.HashedAddress).To(Equal(expectedKeccakOfContractAddress))
Expect(result.BlockHash).To(Equal(common.HexToHash(blockHash)))
Expect(result.BlockHeight).To(Equal(789))
Expect(result.StorageKey).To(Equal(common.HexToHash(storageKey)))
Expect(result.StorageValue).To(Equal(common.HexToHash(storageValue)))
})
It("returns an error if row is missing data", func() {
_, err := utils.FromParityCsvRow([]string{"0x123"})
Expect(err).To(HaveOccurred())
Expect(err).To(MatchError(utils.ErrRowMalformed{Length: 1}))
})
It("returns error if block height malformed", func() {
_, err := utils.FromParityCsvRow([]string{"", "", "", "", ""})
Expect(err).To(HaveOccurred())
})
})
Describe("FromGethStateDiff", func() {
var (
accountDiff = statediff.AccountDiff{Key: []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 0}}
stateDiff = &statediff.StateDiff{
BlockNumber: big.NewInt(rand.Int63()),
BlockHash: fakes.FakeHash,
}
)
It("adds relevant fields to diff", func() {
storageValueBytes := []byte{3}
storageValueRlp, encodeErr := rlp.EncodeToBytes(storageValueBytes)
Expect(encodeErr).NotTo(HaveOccurred())
storageDiff := statediff.StorageDiff{
Key: []byte{0, 9, 8, 7, 6, 5, 4, 3, 2, 1},
Value: storageValueRlp,
}
result, err := utils.FromGethStateDiff(accountDiff, stateDiff, storageDiff)
Expect(err).NotTo(HaveOccurred())
expectedAddress := common.BytesToHash(accountDiff.Key)
Expect(result.HashedAddress).To(Equal(expectedAddress))
Expect(result.BlockHash).To(Equal(fakes.FakeHash))
expectedBlockHeight := int(stateDiff.BlockNumber.Int64())
Expect(result.BlockHeight).To(Equal(expectedBlockHeight))
expectedStorageKey := common.BytesToHash(storageDiff.Key)
Expect(result.StorageKey).To(Equal(expectedStorageKey))
expectedStorageValue := common.BytesToHash(storageValueBytes)
Expect(result.StorageValue).To(Equal(expectedStorageValue))
})
It("handles decoding large storage values from their RLP", func() {
storageValueBytes := []byte{1, 2, 3, 4, 5, 0, 9, 8, 7, 6}
storageValueRlp, encodeErr := rlp.EncodeToBytes(storageValueBytes)
Expect(encodeErr).NotTo(HaveOccurred())
storageDiff := statediff.StorageDiff{
Key: []byte{0, 9, 8, 7, 6, 5, 4, 3, 2, 1},
Value: storageValueRlp,
}
result, err := utils.FromGethStateDiff(accountDiff, stateDiff, storageDiff)
Expect(err).NotTo(HaveOccurred())
Expect(result.StorageValue).To(Equal(common.BytesToHash(storageValueBytes)))
})
It("returns an err if decoding the storage value Rlp fails", func() {
_, err := utils.FromGethStateDiff(accountDiff, stateDiff, test_data.StorageWithBadValue)
Expect(err).To(HaveOccurred())
Expect(err).To(MatchError("rlp: input contains more than one value"))
})
})
})

View File

@ -1,51 +0,0 @@
// VulcanizeDB
// Copyright © 2019 Vulcanize
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package utils
import (
"strconv"
"github.com/ethereum/go-ethereum/common"
)
const ExpectedRowLength = 5
type StorageDiffRow struct {
Id int
Contract common.Address
BlockHash common.Hash `db:"block_hash"`
BlockHeight int `db:"block_height"`
StorageKey common.Hash `db:"storage_key"`
StorageValue common.Hash `db:"storage_value"`
}
func FromStrings(csvRow []string) (StorageDiffRow, error) {
if len(csvRow) != ExpectedRowLength {
return StorageDiffRow{}, ErrRowMalformed{Length: len(csvRow)}
}
height, err := strconv.Atoi(csvRow[2])
if err != nil {
return StorageDiffRow{}, err
}
return StorageDiffRow{
Contract: common.HexToAddress(csvRow[0]),
BlockHash: common.HexToHash(csvRow[1]),
BlockHeight: height,
StorageKey: common.HexToHash(csvRow[3]),
StorageValue: common.HexToHash(csvRow[4]),
}, nil
}

View File

@ -1,58 +0,0 @@
// VulcanizeDB
// Copyright © 2019 Vulcanize
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package utils_test
import (
"github.com/ethereum/go-ethereum/common"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
)
var _ = Describe("Storage row parsing", func() {
It("converts an array of strings to a row struct", func() {
contract := "0x123"
blockHash := "0x456"
blockHeight := "789"
storageKey := "0x987"
storageValue := "0x654"
data := []string{contract, blockHash, blockHeight, storageKey, storageValue}
result, err := utils.FromStrings(data)
Expect(err).NotTo(HaveOccurred())
Expect(result.Contract).To(Equal(common.HexToAddress(contract)))
Expect(result.BlockHash).To(Equal(common.HexToHash(blockHash)))
Expect(result.BlockHeight).To(Equal(789))
Expect(result.StorageKey).To(Equal(common.HexToHash(storageKey)))
Expect(result.StorageValue).To(Equal(common.HexToHash(storageValue)))
})
It("returns an error if row is missing data", func() {
_, err := utils.FromStrings([]string{"0x123"})
Expect(err).To(HaveOccurred())
Expect(err).To(MatchError(utils.ErrRowMalformed{Length: 1}))
})
It("returns error if block height malformed", func() {
_, err := utils.FromStrings([]string{"", "", "", "", ""})
Expect(err).To(HaveOccurred())
})
})

View File

@ -0,0 +1,41 @@
// Copyright 2019 Vulcanize
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package streamer
import (
"github.com/ethereum/go-ethereum/rpc"
"github.com/ethereum/go-ethereum/statediff"
"github.com/sirupsen/logrus"
"github.com/vulcanize/vulcanizedb/pkg/core"
)
type Streamer interface {
Stream(chan statediff.Payload) (*rpc.ClientSubscription, error)
}
type StateDiffStreamer struct {
client core.RpcClient
}
func (streamer *StateDiffStreamer) Stream(payloadChan chan statediff.Payload) (*rpc.ClientSubscription, error) {
logrus.Info("streaming diffs from geth")
return streamer.client.Subscribe("statediff", payloadChan, "stream")
}
func NewStateDiffStreamer(client core.RpcClient) StateDiffStreamer {
return StateDiffStreamer{
client: client,
}
}
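
A hedged usage sketch of the streamer above, consistent with the Stream signature and the test that follows; obtaining the core.RpcClient and running a node with the statediff service enabled are assumptions outside this snippet.

package example

import (
	"github.com/ethereum/go-ethereum/statediff"
	"github.com/sirupsen/logrus"
	"github.com/vulcanize/vulcanizedb/libraries/shared/streamer"
	"github.com/vulcanize/vulcanizedb/pkg/core"
)

// streamStateDiffs subscribes to geth's statediff service and logs each payload
// until the subscription reports an error.
func streamStateDiffs(rpcClient core.RpcClient) error {
	stateDiffStreamer := streamer.NewStateDiffStreamer(rpcClient)
	payloadChan := make(chan statediff.Payload)
	sub, err := stateDiffStreamer.Stream(payloadChan)
	if err != nil {
		return err
	}
	defer sub.Unsubscribe()
	for {
		select {
		case payload := <-payloadChan:
			logrus.Infof("received state diff payload with %d bytes of state diff rlp", len(payload.StateDiffRlp))
		case subErr := <-sub.Err():
			return subErr
		}
	}
}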

View File

@ -0,0 +1,35 @@
// Copyright 2019 Vulcanize
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package streamer_test
import (
"github.com/ethereum/go-ethereum/statediff"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"github.com/vulcanize/vulcanizedb/libraries/shared/streamer"
"github.com/vulcanize/vulcanizedb/pkg/fakes"
)
var _ = Describe("StateDiff Streamer", func() {
It("subscribes to the geth statediff service", func() {
client := &fakes.MockRpcClient{}
streamer := streamer.NewStateDiffStreamer(client)
payloadChan := make(chan statediff.Payload)
_, err := streamer.Stream(payloadChan)
Expect(err).NotTo(HaveOccurred())
client.AssertSubscribeCalledWith("statediff", payloadChan, []interface{}{"stream"})
})
})

View File

@ -0,0 +1,19 @@
package streamer_test
import (
"io/ioutil"
"testing"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"github.com/sirupsen/logrus"
)
func TestStreamer(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecs(t, "Streamer Suite")
}
var _ = BeforeSuite(func() {
logrus.SetOutput(ioutil.Discard)
})

View File

@ -0,0 +1,137 @@
// Copyright 2018 Vulcanize
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package test_data
import (
"errors"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/state"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/rlp"
"github.com/ethereum/go-ethereum/statediff"
"math/big"
"math/rand"
)
var (
BlockNumber = big.NewInt(rand.Int63())
BlockHash = "0xfa40fbe2d98d98b3363a778d52f2bcd29d6790b9b3f3cab2b167fd12d3550f73"
CodeHash = common.Hex2Bytes("0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470")
NewNonceValue = rand.Uint64()
NewBalanceValue = rand.Int63()
ContractRoot = common.HexToHash("0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421")
StoragePath = common.HexToHash("0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470").Bytes()
StorageKey = common.HexToHash("0000000000000000000000000000000000000000000000000000000000000001").Bytes()
SmallStorageValue = common.Hex2Bytes("03")
SmallStorageValueRlp, _ = rlp.EncodeToBytes(SmallStorageValue)
storageWithSmallValue = []statediff.StorageDiff{{
Key: StorageKey,
Value: SmallStorageValueRlp,
Path: StoragePath,
Proof: [][]byte{},
}}
LargeStorageValue = common.Hex2Bytes("00191b53778c567b14b50ba0000")
LargeStorageValueRlp, rlpErr = rlp.EncodeToBytes(LargeStorageValue)
storageWithLargeValue = []statediff.StorageDiff{{
Key: StorageKey,
Value: LargeStorageValueRlp,
Path: StoragePath,
Proof: [][]byte{},
}}
EmptyStorage = make([]statediff.StorageDiff, 0)
StorageWithBadValue = statediff.StorageDiff{
Key: StorageKey,
Value: []byte{0, 1, 2},
// this storage value will fail to be decoded as RLP with the following error message:
// "input contains more than one value"
}
contractAddress = common.HexToAddress("0xaE9BEa628c4Ce503DcFD7E305CaB4e29E7476592")
ContractLeafKey = crypto.Keccak256Hash(contractAddress[:])
anotherContractAddress = common.HexToAddress("0xaE9BEa628c4Ce503DcFD7E305CaB4e29E7476593")
AnotherContractLeafKey = crypto.Keccak256Hash(anotherContractAddress[:])
testAccount = state.Account{
Nonce: NewNonceValue,
Balance: big.NewInt(NewBalanceValue),
Root: ContractRoot,
CodeHash: CodeHash,
}
valueBytes, _ = rlp.EncodeToBytes(testAccount)
CreatedAccountDiffs = []statediff.AccountDiff{
{
Key: ContractLeafKey.Bytes(),
Value: valueBytes,
Storage: storageWithSmallValue,
},
}
UpdatedAccountDiffs = []statediff.AccountDiff{{
Key: AnotherContractLeafKey.Bytes(),
Value: valueBytes,
Storage: storageWithLargeValue,
}}
DeletedAccountDiffs = []statediff.AccountDiff{{
Key: AnotherContractLeafKey.Bytes(),
Value: valueBytes,
Storage: storageWithSmallValue,
}}
MockStateDiff = statediff.StateDiff{
BlockNumber: BlockNumber,
BlockHash: common.HexToHash(BlockHash),
CreatedAccounts: CreatedAccountDiffs,
DeletedAccounts: DeletedAccountDiffs,
UpdatedAccounts: UpdatedAccountDiffs,
}
MockStateDiffBytes, _ = rlp.EncodeToBytes(MockStateDiff)
mockTransaction1 = types.NewTransaction(0, common.HexToAddress("0x0"), big.NewInt(1000), 50, big.NewInt(100), nil)
mockTransaction2 = types.NewTransaction(1, common.HexToAddress("0x1"), big.NewInt(2000), 100, big.NewInt(200), nil)
MockTransactions = types.Transactions{mockTransaction1, mockTransaction2}
mockReceipt1 = types.NewReceipt(common.HexToHash("0x0").Bytes(), false, 50)
mockReceipt2 = types.NewReceipt(common.HexToHash("0x1").Bytes(), false, 100)
MockReceipts = types.Receipts{mockReceipt1, mockReceipt2}
MockHeader = types.Header{
Time: 0,
Number: BlockNumber,
Root: common.HexToHash("0x0"),
TxHash: common.HexToHash("0x0"),
ReceiptHash: common.HexToHash("0x0"),
}
MockBlock = types.NewBlock(&MockHeader, MockTransactions, nil, MockReceipts)
MockBlockRlp, _ = rlp.EncodeToBytes(MockBlock)
MockStatediffPayload = statediff.Payload{
BlockRlp: MockBlockRlp,
StateDiffRlp: MockStateDiffBytes,
Err: nil,
}
EmptyStatediffPayload = statediff.Payload{
BlockRlp: []byte{},
StateDiffRlp: []byte{},
Err: nil,
}
ErrStatediffPayload = statediff.Payload{
BlockRlp: []byte{},
StateDiffRlp: []byte{},
Err: errors.New("mock error"),
}
)
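
Since MockStateDiffBytes is produced with rlp.EncodeToBytes, consumers of MockStatediffPayload can presumably recover the structured diff the same way a real payload would be handled; a small round-trip sketch (the function name is illustrative, not from this commit):

package example

import (
	"github.com/ethereum/go-ethereum/rlp"
	"github.com/ethereum/go-ethereum/statediff"
	"github.com/vulcanize/vulcanizedb/libraries/shared/test_data"
)

// decodeMockPayload decodes the fixture's StateDiffRlp back into a statediff.StateDiff,
// mirroring what a payload consumer would do before calling FromGethStateDiff.
func decodeMockPayload() (statediff.StateDiff, error) {
	var stateDiff statediff.StateDiff
	if err := rlp.DecodeBytes(test_data.MockStatediffPayload.StateDiffRlp, &stateDiff); err != nil {
		return statediff.StateDiff{}, err
	}
	return stateDiff, nil
}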

View File

@ -18,14 +18,13 @@ package transformer
import ( import (
"github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres"
) )
type StorageTransformer interface { type StorageTransformer interface {
Execute(row utils.StorageDiffRow) error Execute(diff utils.StorageDiff) error
ContractAddress() common.Address KeccakContractAddress() common.Hash
} }
type StorageTransformerInitializer func(db *postgres.DB) StorageTransformer type StorageTransformerInitializer func(db *postgres.DB) StorageTransformer
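
A hypothetical transformer satisfying the revised interface, mainly to show that implementations now report the keccak hash of their contract address (matching the hashed addresses carried on incoming diffs) instead of the address itself; the type and initializer names here are illustrative, not from this commit.

package example

import (
	"github.com/ethereum/go-ethereum/common"
	"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
	"github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
	"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres"
)

// exampleTransformer is a stand-in; a real transformer would decode diff.StorageValue
// with utils.Decode and persist the result.
type exampleTransformer struct {
	db      *postgres.DB
	address common.Address
}

func (t exampleTransformer) KeccakContractAddress() common.Hash {
	// The watcher keys transformers by this hash, so it must match the hashed
	// address reported on incoming StorageDiffs.
	return utils.HexToKeccak256Hash(t.address.Hex())
}

func (t exampleTransformer) Execute(diff utils.StorageDiff) error {
	return nil // decoding and persistence omitted in this sketch
}

// exampleInitializer matches the StorageTransformerInitializer signature.
var exampleInitializer transformer.StorageTransformerInitializer = func(db *postgres.DB) transformer.StorageTransformer {
	return exampleTransformer{db: db, address: common.HexToAddress("0x0123456789abcdef")}
}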

View File

@ -18,69 +18,76 @@ package watcher
import ( import (
"fmt" "fmt"
"reflect"
"time"
"github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common"
"github.com/sirupsen/logrus" "github.com/sirupsen/logrus"
"github.com/vulcanize/vulcanizedb/libraries/shared/fetcher" "github.com/vulcanize/vulcanizedb/libraries/shared/fetcher"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage" "github.com/vulcanize/vulcanizedb/libraries/shared/storage"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
"github.com/vulcanize/vulcanizedb/libraries/shared/transformer" "github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres"
"time"
) )
type IStorageWatcher interface {
AddTransformers(initializers []transformer.StorageTransformerInitializer)
Execute(diffsChan chan utils.StorageDiff, errsChan chan error, queueRecheckInterval time.Duration)
}
type StorageWatcher struct { type StorageWatcher struct {
db *postgres.DB db *postgres.DB
StorageFetcher fetcher.IStorageFetcher StorageFetcher fetcher.IStorageFetcher
Queue storage.IStorageQueue Queue storage.IStorageQueue
Transformers map[common.Address]transformer.StorageTransformer KeccakAddressTransformers map[common.Hash]transformer.StorageTransformer // keccak hash of an address => transformer
} }
func NewStorageWatcher(fetcher fetcher.IStorageFetcher, db *postgres.DB) StorageWatcher { func NewStorageWatcher(fetcher fetcher.IStorageFetcher, db *postgres.DB) StorageWatcher {
transformers := make(map[common.Address]transformer.StorageTransformer)
queue := storage.NewStorageQueue(db) queue := storage.NewStorageQueue(db)
transformers := make(map[common.Hash]transformer.StorageTransformer)
return StorageWatcher{ return StorageWatcher{
db: db, db: db,
StorageFetcher: fetcher, StorageFetcher: fetcher,
Queue: queue, Queue: queue,
Transformers: transformers, KeccakAddressTransformers: transformers,
} }
} }
func (storageWatcher StorageWatcher) AddTransformers(initializers []transformer.StorageTransformerInitializer) { func (storageWatcher StorageWatcher) AddTransformers(initializers []transformer.StorageTransformerInitializer) {
for _, initializer := range initializers { for _, initializer := range initializers {
storageTransformer := initializer(storageWatcher.db) storageTransformer := initializer(storageWatcher.db)
storageWatcher.Transformers[storageTransformer.ContractAddress()] = storageTransformer storageWatcher.KeccakAddressTransformers[storageTransformer.KeccakContractAddress()] = storageTransformer
} }
} }
func (storageWatcher StorageWatcher) Execute(rows chan utils.StorageDiffRow, errs chan error, queueRecheckInterval time.Duration) { func (storageWatcher StorageWatcher) Execute(diffsChan chan utils.StorageDiff, errsChan chan error, queueRecheckInterval time.Duration) {
ticker := time.NewTicker(queueRecheckInterval) ticker := time.NewTicker(queueRecheckInterval)
go storageWatcher.StorageFetcher.FetchStorageDiffs(rows, errs) go storageWatcher.StorageFetcher.FetchStorageDiffs(diffsChan, errsChan)
for { for {
select { select {
case fetchErr := <-errs: case fetchErr := <-errsChan:
logrus.Warn(fmt.Sprintf("error fetching storage diffs: %s", fetchErr)) logrus.Warn(fmt.Sprintf("error fetching storage diffs: %s", fetchErr))
case row := <-rows: case diff := <-diffsChan:
storageWatcher.processRow(row) storageWatcher.processRow(diff)
case <-ticker.C: case <-ticker.C:
storageWatcher.processQueue() storageWatcher.processQueue()
} }
} }
} }
func (storageWatcher StorageWatcher) processRow(row utils.StorageDiffRow) { func (storageWatcher StorageWatcher) getTransformer(diff utils.StorageDiff) (transformer.StorageTransformer, bool) {
storageTransformer, ok := storageWatcher.Transformers[row.Contract] storageTransformer, ok := storageWatcher.KeccakAddressTransformers[diff.HashedAddress]
return storageTransformer, ok
}
func (storageWatcher StorageWatcher) processRow(diff utils.StorageDiff) {
storageTransformer, ok := storageWatcher.getTransformer(diff)
if !ok { if !ok {
// ignore rows from unwatched contracts logrus.Debug("ignoring a diff from an unwatched contract")
return return
} }
executeErr := storageTransformer.Execute(row) executeErr := storageTransformer.Execute(diff)
if executeErr != nil { if executeErr != nil {
logrus.Warn(fmt.Sprintf("error executing storage transformer: %s", executeErr)) logrus.Warn(fmt.Sprintf("error executing storage transformer: %s", executeErr))
queueErr := storageWatcher.Queue.Add(row) queueErr := storageWatcher.Queue.Add(diff)
if queueErr != nil { if queueErr != nil {
logrus.Warn(fmt.Sprintf("error queueing storage diff: %s", queueErr)) logrus.Warn(fmt.Sprintf("error queueing storage diff: %s", queueErr))
} }
@ -88,20 +95,20 @@ func (storageWatcher StorageWatcher) processRow(row utils.StorageDiffRow) {
} }
func (storageWatcher StorageWatcher) processQueue() { func (storageWatcher StorageWatcher) processQueue() {
rows, fetchErr := storageWatcher.Queue.GetAll() diffs, fetchErr := storageWatcher.Queue.GetAll()
if fetchErr != nil { if fetchErr != nil {
logrus.Warn(fmt.Sprintf("error getting queued storage: %s", fetchErr)) logrus.Warn(fmt.Sprintf("error getting queued storage: %s", fetchErr))
} }
for _, row := range rows { for _, diff := range diffs {
storageTransformer, ok := storageWatcher.Transformers[row.Contract] storageTransformer, ok := storageWatcher.getTransformer(diff)
if !ok { if !ok {
// delete row from queue if address no longer watched // delete diff from queue if address no longer watched
storageWatcher.deleteRow(row.Id) storageWatcher.deleteRow(diff.Id)
continue continue
} }
executeErr := storageTransformer.Execute(row) executeErr := storageTransformer.Execute(diff)
if executeErr == nil { if executeErr == nil {
storageWatcher.deleteRow(row.Id) storageWatcher.deleteRow(diff.Id)
} }
} }
} }
@ -109,10 +116,6 @@ func (storageWatcher StorageWatcher) processQueue() {
func (storageWatcher StorageWatcher) deleteRow(id int) { func (storageWatcher StorageWatcher) deleteRow(id int) {
deleteErr := storageWatcher.Queue.Delete(id) deleteErr := storageWatcher.Queue.Delete(id)
if deleteErr != nil { if deleteErr != nil {
logrus.Warn(fmt.Sprintf("error deleting persisted row from queue: %s", deleteErr)) logrus.Warn(fmt.Sprintf("error deleting persisted diff from queue: %s", deleteErr))
} }
} }
func isKeyNotFound(executeErr error) bool {
return reflect.TypeOf(executeErr) == reflect.TypeOf(utils.ErrStorageKeyNotFound{})
}
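
For orientation, a brief sketch of driving the reworked watcher's Execute loop with caller-owned channels and a queue recheck interval, as the tests that follow do; the watcher is assumed to already have its fetcher, queue, and transformers configured, and the interval here is only an example value.

package example

import (
	"time"

	"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
	"github.com/vulcanize/vulcanizedb/libraries/shared/watcher"
)

// runWatcher starts Execute in the background. Execute forwards each fetched diff to
// the transformer registered under its hashed address, queues diffs whose transformer
// fails, and rechecks the queue on every tick of the given interval.
func runWatcher(sw watcher.StorageWatcher) {
	diffs := make(chan utils.StorageDiff)
	errs := make(chan error)
	go sw.Execute(diffs, errs, 5*time.Minute)
	// Execute hands diffs and errs to the fetcher itself, so the caller only needs
	// to keep this goroutine alive.
}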

View File

@ -17,55 +17,56 @@
package watcher_test package watcher_test
import ( import (
"io/ioutil"
"os"
"time"
"github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common"
. "github.com/onsi/ginkgo" . "github.com/onsi/ginkgo"
. "github.com/onsi/gomega" . "github.com/onsi/gomega"
"github.com/sirupsen/logrus" "github.com/sirupsen/logrus"
"github.com/vulcanize/vulcanizedb/libraries/shared/mocks" "github.com/vulcanize/vulcanizedb/libraries/shared/mocks"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
"github.com/vulcanize/vulcanizedb/libraries/shared/transformer" "github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
"github.com/vulcanize/vulcanizedb/libraries/shared/watcher" "github.com/vulcanize/vulcanizedb/libraries/shared/watcher"
"github.com/vulcanize/vulcanizedb/pkg/fakes" "github.com/vulcanize/vulcanizedb/pkg/fakes"
"github.com/vulcanize/vulcanizedb/test_config" "github.com/vulcanize/vulcanizedb/test_config"
"io/ioutil"
"os"
"time"
) )
var _ = Describe("Storage Watcher", func() { var _ = Describe("Storage Watcher", func() {
Describe("AddTransformer", func() {
It("adds transformers", func() { It("adds transformers", func() {
fakeAddress := common.HexToAddress("0x12345") fakeHashedAddress := utils.HexToKeccak256Hash("0x12345")
fakeTransformer := &mocks.MockStorageTransformer{Address: fakeAddress} fakeTransformer := &mocks.MockStorageTransformer{KeccakOfAddress: fakeHashedAddress}
w := watcher.NewStorageWatcher(mocks.NewMockStorageFetcher(), test_config.NewTestDB(test_config.NewTestNode())) w := watcher.NewStorageWatcher(mocks.NewMockStorageFetcher(), test_config.NewTestDB(test_config.NewTestNode()))
w.AddTransformers([]transformer.StorageTransformerInitializer{fakeTransformer.FakeTransformerInitializer}) w.AddTransformers([]transformer.StorageTransformerInitializer{fakeTransformer.FakeTransformerInitializer})
Expect(w.Transformers[fakeAddress]).To(Equal(fakeTransformer)) Expect(w.KeccakAddressTransformers[fakeHashedAddress]).To(Equal(fakeTransformer))
})
}) })
Describe("executing watcher", func() { Describe("Execute", func() {
var ( var (
errs chan error errs chan error
mockFetcher *mocks.MockStorageFetcher mockFetcher *mocks.MockStorageFetcher
mockQueue *mocks.MockStorageQueue mockQueue *mocks.MockStorageQueue
mockTransformer *mocks.MockStorageTransformer mockTransformer *mocks.MockStorageTransformer
row utils.StorageDiffRow csvDiff utils.StorageDiff
rows chan utils.StorageDiffRow diffs chan utils.StorageDiff
storageWatcher watcher.StorageWatcher storageWatcher watcher.StorageWatcher
hashedAddress common.Hash
) )
BeforeEach(func() { BeforeEach(func() {
errs = make(chan error) errs = make(chan error)
rows = make(chan utils.StorageDiffRow) diffs = make(chan utils.StorageDiff)
address := common.HexToAddress("0x0123456789abcdef") hashedAddress = utils.HexToKeccak256Hash("0x0123456789abcdef")
mockFetcher = mocks.NewMockStorageFetcher() mockFetcher = mocks.NewMockStorageFetcher()
mockQueue = &mocks.MockStorageQueue{} mockQueue = &mocks.MockStorageQueue{}
mockTransformer = &mocks.MockStorageTransformer{Address: address} mockTransformer = &mocks.MockStorageTransformer{KeccakOfAddress: hashedAddress}
row = utils.StorageDiffRow{ csvDiff = utils.StorageDiff{
Id: 1337, Id: 1337,
Contract: address, HashedAddress: hashedAddress,
BlockHash: common.HexToHash("0xfedcba9876543210"), BlockHash: common.HexToHash("0xfedcba9876543210"),
BlockHeight: 0, BlockHeight: 0,
StorageKey: common.HexToHash("0xabcdef1234567890"), StorageKey: common.HexToHash("0xabcdef1234567890"),
@ -83,7 +84,7 @@ var _ = Describe("Storage Watcher", func() {
defer os.Remove(tempFile.Name()) defer os.Remove(tempFile.Name())
logrus.SetOutput(tempFile) logrus.SetOutput(tempFile)
go storageWatcher.Execute(rows, errs, time.Hour) go storageWatcher.Execute(diffs, errs, time.Hour)
Eventually(func() (string, error) { Eventually(func() (string, error) {
logContent, err := ioutil.ReadFile(tempFile.Name()) logContent, err := ioutil.ReadFile(tempFile.Name())
@ -92,39 +93,39 @@ var _ = Describe("Storage Watcher", func() {
close(done) close(done)
}) })
Describe("transforming new storage diffs", func() { Describe("transforming new storage diffs from csv", func() {
BeforeEach(func() { BeforeEach(func() {
mockFetcher.RowsToReturn = []utils.StorageDiffRow{row} mockFetcher.DiffsToReturn = []utils.StorageDiff{csvDiff}
storageWatcher = watcher.NewStorageWatcher(mockFetcher, test_config.NewTestDB(test_config.NewTestNode())) storageWatcher = watcher.NewStorageWatcher(mockFetcher, test_config.NewTestDB(test_config.NewTestNode()))
storageWatcher.Queue = mockQueue storageWatcher.Queue = mockQueue
storageWatcher.AddTransformers([]transformer.StorageTransformerInitializer{mockTransformer.FakeTransformerInitializer}) storageWatcher.AddTransformers([]transformer.StorageTransformerInitializer{mockTransformer.FakeTransformerInitializer})
}) })
It("executes transformer for recognized storage row", func(done Done) { It("executes transformer for recognized storage diff", func(done Done) {
go storageWatcher.Execute(rows, errs, time.Hour) go storageWatcher.Execute(diffs, errs, time.Hour)
Eventually(func() utils.StorageDiffRow { Eventually(func() utils.StorageDiff {
return mockTransformer.PassedRow return mockTransformer.PassedDiff
}).Should(Equal(row)) }).Should(Equal(csvDiff))
close(done) close(done)
}) })
It("queues row for later processing if transformer execution fails", func(done Done) { It("queues diff for later processing if transformer execution fails", func(done Done) {
mockTransformer.ExecuteErr = fakes.FakeError mockTransformer.ExecuteErr = fakes.FakeError
go storageWatcher.Execute(rows, errs, time.Hour) go storageWatcher.Execute(diffs, errs, time.Hour)
Expect(<-errs).To(BeNil()) Expect(<-errs).To(BeNil())
Eventually(func() bool { Eventually(func() bool {
return mockQueue.AddCalled return mockQueue.AddCalled
}).Should(BeTrue()) }).Should(BeTrue())
Eventually(func() utils.StorageDiffRow { Eventually(func() utils.StorageDiff {
return mockQueue.AddPassedRow return mockQueue.AddPassedDiff
}).Should(Equal(row)) }).Should(Equal(csvDiff))
close(done) close(done)
}) })
It("logs error if queueing row fails", func(done Done) { It("logs error if queueing diff fails", func(done Done) {
mockTransformer.ExecuteErr = utils.ErrStorageKeyNotFound{} mockTransformer.ExecuteErr = utils.ErrStorageKeyNotFound{}
mockQueue.AddError = fakes.FakeError mockQueue.AddError = fakes.FakeError
tempFile, fileErr := ioutil.TempFile("", "log") tempFile, fileErr := ioutil.TempFile("", "log")
@ -132,7 +133,7 @@ var _ = Describe("Storage Watcher", func() {
defer os.Remove(tempFile.Name()) defer os.Remove(tempFile.Name())
logrus.SetOutput(tempFile) logrus.SetOutput(tempFile)
go storageWatcher.Execute(rows, errs, time.Hour) go storageWatcher.Execute(diffs, errs, time.Hour)
Eventually(func() bool { Eventually(func() bool {
return mockQueue.AddCalled return mockQueue.AddCalled
@ -147,54 +148,38 @@ var _ = Describe("Storage Watcher", func() {
Describe("transforming queued storage diffs", func() { Describe("transforming queued storage diffs", func() {
BeforeEach(func() { BeforeEach(func() {
mockQueue.RowsToReturn = []utils.StorageDiffRow{row} mockQueue.DiffsToReturn = []utils.StorageDiff{csvDiff}
storageWatcher = watcher.NewStorageWatcher(mockFetcher, test_config.NewTestDB(test_config.NewTestNode())) storageWatcher = watcher.NewStorageWatcher(mockFetcher, test_config.NewTestDB(test_config.NewTestNode()))
storageWatcher.Queue = mockQueue storageWatcher.Queue = mockQueue
storageWatcher.AddTransformers([]transformer.StorageTransformerInitializer{mockTransformer.FakeTransformerInitializer}) storageWatcher.AddTransformers([]transformer.StorageTransformerInitializer{mockTransformer.FakeTransformerInitializer})
}) })
It("logs error if getting queued storage fails", func(done Done) { It("executes transformer for storage diff", func(done Done) {
mockQueue.GetAllErr = fakes.FakeError go storageWatcher.Execute(diffs, errs, time.Nanosecond)
tempFile, fileErr := ioutil.TempFile("", "log")
Expect(fileErr).NotTo(HaveOccurred())
defer os.Remove(tempFile.Name())
logrus.SetOutput(tempFile)
go storageWatcher.Execute(rows, errs, time.Nanosecond) Eventually(func() utils.StorageDiff {
return mockTransformer.PassedDiff
Eventually(func() (string, error) { }).Should(Equal(csvDiff))
logContent, err := ioutil.ReadFile(tempFile.Name())
return string(logContent), err
}).Should(ContainSubstring(fakes.FakeError.Error()))
close(done) close(done)
}) })
It("executes transformer for storage row", func(done Done) { It("deletes diff from queue if transformer execution successful", func(done Done) {
go storageWatcher.Execute(rows, errs, time.Nanosecond) go storageWatcher.Execute(diffs, errs, time.Nanosecond)
Eventually(func() utils.StorageDiffRow {
return mockTransformer.PassedRow
}).Should(Equal(row))
close(done)
})
It("deletes row from queue if transformer execution successful", func(done Done) {
go storageWatcher.Execute(rows, errs, time.Nanosecond)
Eventually(func() int { Eventually(func() int {
return mockQueue.DeletePassedId return mockQueue.DeletePassedId
}).Should(Equal(row.Id)) }).Should(Equal(csvDiff.Id))
close(done) close(done)
}) })
It("logs error if deleting persisted row fails", func(done Done) { It("logs error if deleting persisted diff fails", func(done Done) {
mockQueue.DeleteErr = fakes.FakeError mockQueue.DeleteErr = fakes.FakeError
tempFile, fileErr := ioutil.TempFile("", "log") tempFile, fileErr := ioutil.TempFile("", "log")
Expect(fileErr).NotTo(HaveOccurred()) Expect(fileErr).NotTo(HaveOccurred())
defer os.Remove(tempFile.Name()) defer os.Remove(tempFile.Name())
logrus.SetOutput(tempFile) logrus.SetOutput(tempFile)
go storageWatcher.Execute(rows, errs, time.Nanosecond) go storageWatcher.Execute(diffs, errs, time.Nanosecond)
Eventually(func() (string, error) { Eventually(func() (string, error) {
logContent, err := ioutil.ReadFile(tempFile.Name()) logContent, err := ioutil.ReadFile(tempFile.Name())
@ -203,34 +188,34 @@ var _ = Describe("Storage Watcher", func() {
close(done) close(done)
}) })
It("deletes obsolete row from queue if contract not recognized", func(done Done) { It("deletes obsolete diff from queue if contract not recognized", func(done Done) {
obsoleteRow := utils.StorageDiffRow{ obsoleteDiff := utils.StorageDiff{
Id: row.Id + 1, Id: csvDiff.Id + 1,
Contract: common.HexToAddress("0xfedcba9876543210"), HashedAddress: utils.HexToKeccak256Hash("0xfedcba9876543210"),
} }
mockQueue.RowsToReturn = []utils.StorageDiffRow{obsoleteRow} mockQueue.DiffsToReturn = []utils.StorageDiff{obsoleteDiff}
go storageWatcher.Execute(rows, errs, time.Nanosecond) go storageWatcher.Execute(diffs, errs, time.Nanosecond)
Eventually(func() int { Eventually(func() int {
return mockQueue.DeletePassedId return mockQueue.DeletePassedId
}).Should(Equal(obsoleteRow.Id)) }).Should(Equal(obsoleteDiff.Id))
close(done) close(done)
}) })
It("logs error if deleting obsolete row fails", func(done Done) { It("logs error if deleting obsolete diff fails", func(done Done) {
obsoleteRow := utils.StorageDiffRow{ obsoleteDiff := utils.StorageDiff{
Id: row.Id + 1, Id: csvDiff.Id + 1,
Contract: common.HexToAddress("0xfedcba9876543210"), HashedAddress: utils.HexToKeccak256Hash("0xfedcba9876543210"),
} }
mockQueue.RowsToReturn = []utils.StorageDiffRow{obsoleteRow} mockQueue.DiffsToReturn = []utils.StorageDiff{obsoleteDiff}
mockQueue.DeleteErr = fakes.FakeError mockQueue.DeleteErr = fakes.FakeError
tempFile, fileErr := ioutil.TempFile("", "log") tempFile, fileErr := ioutil.TempFile("", "log")
Expect(fileErr).NotTo(HaveOccurred()) Expect(fileErr).NotTo(HaveOccurred())
defer os.Remove(tempFile.Name()) defer os.Remove(tempFile.Name())
logrus.SetOutput(tempFile) logrus.SetOutput(tempFile)
go storageWatcher.Execute(rows, errs, time.Nanosecond) go storageWatcher.Execute(diffs, errs, time.Nanosecond)
Eventually(func() (string, error) { Eventually(func() (string, error) {
logContent, err := ioutil.ReadFile(tempFile.Name()) logContent, err := ioutil.ReadFile(tempFile.Name())
@ -239,6 +224,5 @@ var _ = Describe("Storage Watcher", func() {
close(done) close(done)
}) })
}) })
}) })
}) })

View File

@ -92,5 +92,5 @@ func (e Event) Sig() common.Hash {
types[i] = input.Type.String() types[i] = input.Type.String()
} }
return common.BytesToHash(crypto.Keccak256([]byte(fmt.Sprintf("%v(%v)", e.Name, strings.Join(types, ","))))) return crypto.Keccak256Hash([]byte(fmt.Sprintf("%v(%v)", e.Name, strings.Join(types, ","))))
} }

View File

@ -107,5 +107,5 @@ func (m Method) Sig() common.Hash {
i++ i++
} }
return common.BytesToHash(crypto.Keccak256([]byte(fmt.Sprintf("%v(%v)", m.Name, strings.Join(types, ","))))) return crypto.Keccak256Hash([]byte(fmt.Sprintf("%v(%v)", m.Name, strings.Join(types, ","))))
} }

View File

@ -18,6 +18,7 @@ package core
import ( import (
"context" "context"
"github.com/ethereum/go-ethereum/rpc"
"github.com/vulcanize/vulcanizedb/pkg/geth/client" "github.com/vulcanize/vulcanizedb/pkg/geth/client"
) )
@ -27,4 +28,5 @@ type RpcClient interface {
BatchCall(batch []client.BatchElem) error BatchCall(batch []client.BatchElem) error
IpcPath() string IpcPath() string
SupportedModules() (map[string]string, error) SupportedModules() (map[string]string, error)
Subscribe(namespace string, payloadChan interface{}, args ...interface{}) (*rpc.ClientSubscription, error)
} }

View File

@ -18,10 +18,13 @@ package fakes
import ( import (
"context" "context"
"errors"
"github.com/ethereum/go-ethereum/statediff"
"math/big" "math/big"
"github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/p2p" "github.com/ethereum/go-ethereum/p2p"
"github.com/ethereum/go-ethereum/rpc"
. "github.com/onsi/gomega" . "github.com/onsi/gomega"
"github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/core"
@ -36,6 +39,9 @@ type MockRpcClient struct {
passedMethod string passedMethod string
passedResult interface{} passedResult interface{}
passedBatch []client.BatchElem passedBatch []client.BatchElem
passedNamespace string
passedPayloadChan chan statediff.Payload
passedSubscribeArgs []interface{}
lengthOfBatch int lengthOfBatch int
returnPOAHeader core.POAHeader returnPOAHeader core.POAHeader
returnPOAHeaders []core.POAHeader returnPOAHeaders []core.POAHeader
@ -43,6 +49,29 @@ type MockRpcClient struct {
supportedModules map[string]string supportedModules map[string]string
} }
func (client *MockRpcClient) Subscribe(namespace string, payloadChan interface{}, args ...interface{}) (*rpc.ClientSubscription, error) {
client.passedNamespace = namespace
passedPayloadChan, ok := payloadChan.(chan statediff.Payload)
if !ok {
return nil, errors.New("passed in channel is not of the correct type")
}
client.passedPayloadChan = passedPayloadChan
for _, arg := range args {
client.passedSubscribeArgs = append(client.passedSubscribeArgs, arg)
}
subscription := rpc.ClientSubscription{}
return &subscription, nil
}
func (client *MockRpcClient) AssertSubscribeCalledWith(namespace string, payloadChan chan statediff.Payload, args []interface{}) {
Expect(client.passedNamespace).To(Equal(namespace))
Expect(client.passedPayloadChan).To(Equal(payloadChan))
Expect(client.passedSubscribeArgs).To(Equal(args))
}
func NewMockRpcClient() *MockRpcClient { func NewMockRpcClient() *MockRpcClient {
return &MockRpcClient{} return &MockRpcClient{}
} }

View File

@ -18,6 +18,9 @@ package client
import ( import (
"context" "context"
"errors"
"reflect"
"github.com/ethereum/go-ethereum/rpc" "github.com/ethereum/go-ethereum/rpc"
) )
@ -73,3 +76,16 @@ func (client RpcClient) BatchCall(batch []BatchElem) error {
} }
return client.client.BatchCall(rpcBatch) return client.client.BatchCall(rpcBatch)
} }
// Subscribe subscribes to an rpc "namespace_subscribe" subscription with the given channel
// The first of the variadic args needs to be the name of the subscription method we wish to invoke (e.g. "stream")
func (client RpcClient) Subscribe(namespace string, payloadChan interface{}, args ...interface{}) (*rpc.ClientSubscription, error) {
chanVal := reflect.ValueOf(payloadChan)
if chanVal.Kind() != reflect.Chan || chanVal.Type().ChanDir()&reflect.SendDir == 0 {
return nil, errors.New("second argument to Subscribe must be a writable channel")
}
if chanVal.IsNil() {
return nil, errors.New("channel given to Subscribe must not be nil")
}
return client.client.Subscribe(context.Background(), namespace, payloadChan, args...)
}
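
The comment above matters in practice: with go-ethereum's rpc client, the namespace and the first variadic argument together determine the subscription that gets set up (e.g. "statediff" plus "stream" becomes a statediff_subscribe("stream") request), so a hedged end-to-end call against this wrapper might look like:

package example

import (
	"github.com/ethereum/go-ethereum/statediff"
	"github.com/vulcanize/vulcanizedb/pkg/geth/client"
)

// subscribeToStateDiffs uses the new Subscribe wrapper directly; payloadChan must be
// a non-nil writable channel or the reflect-based checks above return an error.
func subscribeToStateDiffs(rpcClient client.RpcClient) (<-chan statediff.Payload, error) {
	payloadChan := make(chan statediff.Payload)
	_, err := rpcClient.Subscribe("statediff", payloadChan, "stream")
	if err != nil {
		return nil, err
	}
	return payloadChan, nil
}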