commit
c318b1c83d
2
.gitignore
vendored
2
.gitignore
vendored
@ -7,3 +7,5 @@ environments/*.toml
|
|||||||
Vagrantfile
|
Vagrantfile
|
||||||
vagrant_bootstrap.sh
|
vagrant_bootstrap.sh
|
||||||
.vagrant
|
.vagrant
|
||||||
|
test_scripts/
|
||||||
|
vulcanizedb
|
||||||
|
@ -7,12 +7,11 @@ services:
|
|||||||
addons:
|
addons:
|
||||||
postgresql: "9.6"
|
postgresql: "9.6"
|
||||||
|
|
||||||
|
go_import_path: github.com/vulcanize/vulcanizedb
|
||||||
|
|
||||||
before_install:
|
before_install:
|
||||||
# godo
|
|
||||||
- go get -u gopkg.in/godo.v2/cmd/godo
|
|
||||||
# dep
|
# dep
|
||||||
- go get -u github.com/golang/dep/cmd/dep
|
- go get -u github.com/golang/dep/cmd/dep
|
||||||
- dep ensure
|
|
||||||
# ginkgo
|
# ginkgo
|
||||||
- go get -u github.com/onsi/ginkgo/ginkgo
|
- go get -u github.com/onsi/ginkgo/ginkgo
|
||||||
# migrate
|
# migrate
|
||||||
@ -30,7 +29,7 @@ install:
|
|||||||
before_script:
|
before_script:
|
||||||
- ./scripts/setup
|
- ./scripts/setup
|
||||||
- nohup ./scripts/start_private_blockchain </dev/null &
|
- nohup ./scripts/start_private_blockchain </dev/null &
|
||||||
- godo migrate -- -e private
|
- make migrate HOST_NAME=localhost NAME=vulcanize_private PORT=5432
|
||||||
|
|
||||||
script:
|
script:
|
||||||
- ginkgo -r
|
- ginkgo -r
|
||||||
|
109
Gododir/main.go
109
Gododir/main.go
@ -1,109 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"log"
|
|
||||||
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"github.com/vulcanize/vulcanizedb/cmd"
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/config"
|
|
||||||
do "gopkg.in/godo.v2"
|
|
||||||
)
|
|
||||||
|
|
||||||
func parseEnvironment(context *do.Context) string {
|
|
||||||
environment := context.Args.MayString("", "environment", "env", "e")
|
|
||||||
if environment == "" {
|
|
||||||
log.Fatalln("--environment required")
|
|
||||||
}
|
|
||||||
return environment
|
|
||||||
}
|
|
||||||
|
|
||||||
func tasks(p *do.Project) {
|
|
||||||
|
|
||||||
p.Task("run", nil, func(context *do.Context) {
|
|
||||||
environment := parseEnvironment(context)
|
|
||||||
context.Start(`go run main.go --environment={{.environment}}`,
|
|
||||||
do.M{"environment": environment, "$in": "cmd/run"})
|
|
||||||
})
|
|
||||||
|
|
||||||
p.Task("vulcanizeDb", nil, func(context *do.Context) {
|
|
||||||
environment := parseEnvironment(context)
|
|
||||||
context.Start(`go run main.go --environment={{.environment}}`,
|
|
||||||
do.M{"environment": environment, "$in": "cmd/vulcanize_db"})
|
|
||||||
})
|
|
||||||
|
|
||||||
p.Task("populateBlocks", nil, func(context *do.Context) {
|
|
||||||
environment := parseEnvironment(context)
|
|
||||||
startingNumber := context.Args.MayInt(-1, "starting-number")
|
|
||||||
if startingNumber < 0 {
|
|
||||||
log.Fatalln("--starting-number required")
|
|
||||||
}
|
|
||||||
context.Start(`go run main.go --environment={{.environment}} --starting-number={{.startingNumber}}`,
|
|
||||||
do.M{"environment": environment, "startingNumber": startingNumber, "$in": "cmd/populate_blocks"})
|
|
||||||
})
|
|
||||||
|
|
||||||
p.Task("getLogs", nil, func(context *do.Context) {
|
|
||||||
environment := parseEnvironment(context)
|
|
||||||
contractHash := context.Args.MayString("", "contract-hash", "c")
|
|
||||||
if contractHash == "" {
|
|
||||||
log.Fatalln("--contract-hash required")
|
|
||||||
}
|
|
||||||
context.Start(`go run main.go --environment={{.environment}} --contract-hash={{.contractHash}}`,
|
|
||||||
do.M{
|
|
||||||
"environment": environment,
|
|
||||||
"contractHash": contractHash,
|
|
||||||
"$in": "cmd/get_logs",
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
p.Task("watchContract", nil, func(context *do.Context) {
|
|
||||||
environment := parseEnvironment(context)
|
|
||||||
contractHash := context.Args.MayString("", "contract-hash", "c")
|
|
||||||
abiFilepath := context.Args.MayString("", "abi-filepath", "a")
|
|
||||||
if contractHash == "" {
|
|
||||||
log.Fatalln("--contract-hash required")
|
|
||||||
}
|
|
||||||
context.Start(`go run main.go --environment={{.environment}} --contract-hash={{.contractHash}} --abi-filepath={{.abiFilepath}}`,
|
|
||||||
do.M{
|
|
||||||
"environment": environment,
|
|
||||||
"contractHash": contractHash,
|
|
||||||
"abiFilepath": abiFilepath,
|
|
||||||
"$in": "cmd/watch_contract",
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
p.Task("migrate", nil, func(context *do.Context) {
|
|
||||||
environment := parseEnvironment(context)
|
|
||||||
cfg := cmd.LoadConfig(environment)
|
|
||||||
connectString := config.DbConnectionString(cfg.Database)
|
|
||||||
migrate := fmt.Sprintf("migrate -database '%s' -path ./db/migrations up", connectString)
|
|
||||||
context.Bash(migrate)
|
|
||||||
})
|
|
||||||
|
|
||||||
p.Task("rollback", nil, func(context *do.Context) {
|
|
||||||
environment := parseEnvironment(context)
|
|
||||||
cfg := cmd.LoadConfig(environment)
|
|
||||||
connectString := config.DbConnectionString(cfg.Database)
|
|
||||||
migrate := fmt.Sprintf("migrate -database '%s' -path ./db/migrations down 1", connectString)
|
|
||||||
context.Bash(migrate)
|
|
||||||
})
|
|
||||||
|
|
||||||
p.Task("showContractSummary", nil, func(context *do.Context) {
|
|
||||||
environment := parseEnvironment(context)
|
|
||||||
contractHash := context.Args.MayString("", "contract-hash", "c")
|
|
||||||
blockNumber := context.Args.MayInt(-1, "block-number", "b")
|
|
||||||
if contractHash == "" {
|
|
||||||
log.Fatalln("--contract-hash required")
|
|
||||||
}
|
|
||||||
context.Start(`go run main.go --environment={{.environment}} --contract-hash={{.contractHash}} --block-number={{.blockNumber}}`,
|
|
||||||
do.M{"environment": environment,
|
|
||||||
"contractHash": contractHash,
|
|
||||||
"blockNumber": blockNumber,
|
|
||||||
"$in": "cmd/show_contract_summary"})
|
|
||||||
})
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
do.Godo(tasks)
|
|
||||||
}
|
|
122
Gopkg.lock
generated
122
Gopkg.lock
generated
@ -7,12 +7,6 @@
|
|||||||
revision = "b26d9c308763d68093482582cea63d69be07a0f0"
|
revision = "b26d9c308763d68093482582cea63d69be07a0f0"
|
||||||
version = "v0.3.0"
|
version = "v0.3.0"
|
||||||
|
|
||||||
[[projects]]
|
|
||||||
branch = "master"
|
|
||||||
name = "github.com/MichaelTJones/walk"
|
|
||||||
packages = ["."]
|
|
||||||
revision = "4748e29d5718c2df4028a6543edf86fd8cc0f881"
|
|
||||||
|
|
||||||
[[projects]]
|
[[projects]]
|
||||||
branch = "master"
|
branch = "master"
|
||||||
name = "github.com/aristanetworks/goarista"
|
name = "github.com/aristanetworks/goarista"
|
||||||
@ -31,6 +25,12 @@
|
|||||||
revision = "4bb3c89d44e372e6a9ab85a8be0c9345265c763a"
|
revision = "4bb3c89d44e372e6a9ab85a8be0c9345265c763a"
|
||||||
version = "v1.7.3"
|
version = "v1.7.3"
|
||||||
|
|
||||||
|
[[projects]]
|
||||||
|
name = "github.com/fsnotify/fsnotify"
|
||||||
|
packages = ["."]
|
||||||
|
revision = "c2828203cd70a50dcccfb2761f8b1f8ceef9a8e9"
|
||||||
|
version = "v1.4.7"
|
||||||
|
|
||||||
[[projects]]
|
[[projects]]
|
||||||
name = "github.com/go-stack/stack"
|
name = "github.com/go-stack/stack"
|
||||||
packages = ["."]
|
packages = ["."]
|
||||||
@ -51,9 +51,9 @@
|
|||||||
|
|
||||||
[[projects]]
|
[[projects]]
|
||||||
branch = "master"
|
branch = "master"
|
||||||
name = "github.com/howeyc/gopass"
|
name = "github.com/hashicorp/hcl"
|
||||||
packages = ["."]
|
packages = [".","hcl/ast","hcl/parser","hcl/scanner","hcl/strconv","hcl/token","json/parser","json/scanner","json/token"]
|
||||||
revision = "bf9dde6d0d2c004a008c27aaee91170c786f6db8"
|
revision = "23c074d0eceb2b8a5bfdbb271ab780cde70f05a8"
|
||||||
|
|
||||||
[[projects]]
|
[[projects]]
|
||||||
branch = "master"
|
branch = "master"
|
||||||
@ -61,6 +61,12 @@
|
|||||||
packages = [".","dcps/internetgateway1","dcps/internetgateway2","httpu","scpd","soap","ssdp"]
|
packages = [".","dcps/internetgateway1","dcps/internetgateway2","httpu","scpd","soap","ssdp"]
|
||||||
revision = "dceda08e705b2acee36aab47d765ed801f64cfc7"
|
revision = "dceda08e705b2acee36aab47d765ed801f64cfc7"
|
||||||
|
|
||||||
|
[[projects]]
|
||||||
|
name = "github.com/inconshreveable/mousetrap"
|
||||||
|
packages = ["."]
|
||||||
|
revision = "76626ae9c91c4f2a10f34cad8ce83ea42c93bb75"
|
||||||
|
version = "v1.0"
|
||||||
|
|
||||||
[[projects]]
|
[[projects]]
|
||||||
name = "github.com/jackpal/go-nat-pmp"
|
name = "github.com/jackpal/go-nat-pmp"
|
||||||
packages = ["."]
|
packages = ["."]
|
||||||
@ -80,46 +86,22 @@
|
|||||||
revision = "83612a56d3dd153a94a629cd64925371c9adad78"
|
revision = "83612a56d3dd153a94a629cd64925371c9adad78"
|
||||||
|
|
||||||
[[projects]]
|
[[projects]]
|
||||||
name = "github.com/mattn/go-colorable"
|
name = "github.com/magiconair/properties"
|
||||||
packages = ["."]
|
packages = ["."]
|
||||||
revision = "167de6bfdfba052fa6b2d3664c8f5272e23c9072"
|
revision = "d419a98cdbed11a922bf76f257b7c4be79b50e73"
|
||||||
version = "v0.0.9"
|
version = "v1.7.4"
|
||||||
|
|
||||||
[[projects]]
|
|
||||||
name = "github.com/mattn/go-isatty"
|
|
||||||
packages = ["."]
|
|
||||||
revision = "0360b2af4f38e8d38c7fce2a9f4e702702d73a39"
|
|
||||||
version = "v0.0.3"
|
|
||||||
|
|
||||||
[[projects]]
|
[[projects]]
|
||||||
branch = "master"
|
branch = "master"
|
||||||
name = "github.com/mgutz/ansi"
|
name = "github.com/mitchellh/go-homedir"
|
||||||
packages = ["."]
|
packages = ["."]
|
||||||
revision = "9520e82c474b0a04dd04f8a40959027271bab992"
|
revision = "b8bc1bf767474819792c23f32d8286a45736f1c6"
|
||||||
|
|
||||||
[[projects]]
|
[[projects]]
|
||||||
branch = "master"
|
branch = "master"
|
||||||
name = "github.com/mgutz/minimist"
|
name = "github.com/mitchellh/mapstructure"
|
||||||
packages = ["."]
|
packages = ["."]
|
||||||
revision = "39eb8cf573ca29344bd7d7e6ba4d7febdebd37a9"
|
revision = "b4575eea38cca1123ec2dc90c26529b5c5acfcff"
|
||||||
|
|
||||||
[[projects]]
|
|
||||||
name = "github.com/mgutz/str"
|
|
||||||
packages = ["."]
|
|
||||||
revision = "968bf66e3da857419e4f6e71b2d5c9ae95682dc4"
|
|
||||||
version = "v1.2.0"
|
|
||||||
|
|
||||||
[[projects]]
|
|
||||||
name = "github.com/mgutz/to"
|
|
||||||
packages = ["."]
|
|
||||||
revision = "00c06406c2dd2e011f153a6502a21473676db33f"
|
|
||||||
version = "v1.0.0"
|
|
||||||
|
|
||||||
[[projects]]
|
|
||||||
name = "github.com/nozzle/throttler"
|
|
||||||
packages = ["."]
|
|
||||||
revision = "d9b45f19996c645d38c9266d1f5cf1990e930119"
|
|
||||||
version = "v1.0"
|
|
||||||
|
|
||||||
[[projects]]
|
[[projects]]
|
||||||
name = "github.com/onsi/ginkgo"
|
name = "github.com/onsi/ginkgo"
|
||||||
@ -133,6 +115,12 @@
|
|||||||
revision = "c893efa28eb45626cdaa76c9f653b62488858837"
|
revision = "c893efa28eb45626cdaa76c9f653b62488858837"
|
||||||
version = "v1.2.0"
|
version = "v1.2.0"
|
||||||
|
|
||||||
|
[[projects]]
|
||||||
|
name = "github.com/pelletier/go-toml"
|
||||||
|
packages = ["."]
|
||||||
|
revision = "acdc4509485b587f5e675510c4f2c63e90ff68a8"
|
||||||
|
version = "v1.1.0"
|
||||||
|
|
||||||
[[projects]]
|
[[projects]]
|
||||||
branch = "master"
|
branch = "master"
|
||||||
name = "github.com/rcrowley/go-metrics"
|
name = "github.com/rcrowley/go-metrics"
|
||||||
@ -145,18 +133,48 @@
|
|||||||
revision = "7af7a1e09ba336d2ea14b1ce73bf693c6837dbf6"
|
revision = "7af7a1e09ba336d2ea14b1ce73bf693c6837dbf6"
|
||||||
version = "v1.2"
|
version = "v1.2"
|
||||||
|
|
||||||
|
[[projects]]
|
||||||
|
name = "github.com/spf13/afero"
|
||||||
|
packages = [".","mem"]
|
||||||
|
revision = "bb8f1927f2a9d3ab41c9340aa034f6b803f4359c"
|
||||||
|
version = "v1.0.2"
|
||||||
|
|
||||||
|
[[projects]]
|
||||||
|
name = "github.com/spf13/cast"
|
||||||
|
packages = ["."]
|
||||||
|
revision = "acbeb36b902d72a7a4c18e8f3241075e7ab763e4"
|
||||||
|
version = "v1.1.0"
|
||||||
|
|
||||||
|
[[projects]]
|
||||||
|
name = "github.com/spf13/cobra"
|
||||||
|
packages = ["."]
|
||||||
|
revision = "7b2c5ac9fc04fc5efafb60700713d4fa609b777b"
|
||||||
|
version = "v0.0.1"
|
||||||
|
|
||||||
|
[[projects]]
|
||||||
|
branch = "master"
|
||||||
|
name = "github.com/spf13/jwalterweatherman"
|
||||||
|
packages = ["."]
|
||||||
|
revision = "7c0cea34c8ece3fbeb2b27ab9b59511d360fb394"
|
||||||
|
|
||||||
|
[[projects]]
|
||||||
|
name = "github.com/spf13/pflag"
|
||||||
|
packages = ["."]
|
||||||
|
revision = "e57e3eeb33f795204c1ca35f56c44f83227c6e66"
|
||||||
|
version = "v1.0.0"
|
||||||
|
|
||||||
|
[[projects]]
|
||||||
|
name = "github.com/spf13/viper"
|
||||||
|
packages = ["."]
|
||||||
|
revision = "25b30aa063fc18e48662b86996252eabdcf2f0c7"
|
||||||
|
version = "v1.0.0"
|
||||||
|
|
||||||
[[projects]]
|
[[projects]]
|
||||||
branch = "master"
|
branch = "master"
|
||||||
name = "github.com/syndtr/goleveldb"
|
name = "github.com/syndtr/goleveldb"
|
||||||
packages = ["leveldb","leveldb/cache","leveldb/comparer","leveldb/errors","leveldb/filter","leveldb/iterator","leveldb/journal","leveldb/memdb","leveldb/opt","leveldb/storage","leveldb/table","leveldb/util"]
|
packages = ["leveldb","leveldb/cache","leveldb/comparer","leveldb/errors","leveldb/filter","leveldb/iterator","leveldb/journal","leveldb/memdb","leveldb/opt","leveldb/storage","leveldb/table","leveldb/util"]
|
||||||
revision = "adf24ef3f94bd13ec4163060b21a5678f22b429b"
|
revision = "adf24ef3f94bd13ec4163060b21a5678f22b429b"
|
||||||
|
|
||||||
[[projects]]
|
|
||||||
branch = "master"
|
|
||||||
name = "golang.org/x/crypto"
|
|
||||||
packages = ["ssh/terminal"]
|
|
||||||
revision = "94eea52f7b742c7cbe0b03b22f0c4c8631ece122"
|
|
||||||
|
|
||||||
[[projects]]
|
[[projects]]
|
||||||
branch = "master"
|
branch = "master"
|
||||||
name = "golang.org/x/net"
|
name = "golang.org/x/net"
|
||||||
@ -166,13 +184,13 @@
|
|||||||
[[projects]]
|
[[projects]]
|
||||||
branch = "master"
|
branch = "master"
|
||||||
name = "golang.org/x/sys"
|
name = "golang.org/x/sys"
|
||||||
packages = ["unix","windows"]
|
packages = ["unix"]
|
||||||
revision = "a0f4589a76f1f83070cb9e5613809e1d07b97c13"
|
revision = "a0f4589a76f1f83070cb9e5613809e1d07b97c13"
|
||||||
|
|
||||||
[[projects]]
|
[[projects]]
|
||||||
branch = "master"
|
branch = "master"
|
||||||
name = "golang.org/x/text"
|
name = "golang.org/x/text"
|
||||||
packages = ["encoding","encoding/charmap","encoding/htmlindex","encoding/internal","encoding/internal/identifier","encoding/japanese","encoding/korean","encoding/simplifiedchinese","encoding/traditionalchinese","encoding/unicode","internal/gen","internal/tag","internal/utf8internal","language","runes","transform","unicode/cldr"]
|
packages = ["encoding","encoding/charmap","encoding/htmlindex","encoding/internal","encoding/internal/identifier","encoding/japanese","encoding/korean","encoding/simplifiedchinese","encoding/traditionalchinese","encoding/unicode","internal/gen","internal/tag","internal/triegen","internal/ucd","internal/utf8internal","language","runes","transform","unicode/cldr","unicode/norm"]
|
||||||
revision = "be25de41fadfae372d6470bda81ca6beb55ef551"
|
revision = "be25de41fadfae372d6470bda81ca6beb55ef551"
|
||||||
|
|
||||||
[[projects]]
|
[[projects]]
|
||||||
@ -181,12 +199,6 @@
|
|||||||
revision = "57907de300222151a123d29255ed17f5ed43fad3"
|
revision = "57907de300222151a123d29255ed17f5ed43fad3"
|
||||||
version = "v0.1.0"
|
version = "v0.1.0"
|
||||||
|
|
||||||
[[projects]]
|
|
||||||
name = "gopkg.in/godo.v2"
|
|
||||||
packages = [".","glob","util","watcher","watcher/fswatch"]
|
|
||||||
revision = "b5fd2f0bef1ebe832e628cfad18ab1cc707f65a1"
|
|
||||||
version = "v2.0.9"
|
|
||||||
|
|
||||||
[[projects]]
|
[[projects]]
|
||||||
branch = "v2"
|
branch = "v2"
|
||||||
name = "gopkg.in/karalabe/cookiejar.v2"
|
name = "gopkg.in/karalabe/cookiejar.v2"
|
||||||
@ -208,6 +220,6 @@
|
|||||||
[solve-meta]
|
[solve-meta]
|
||||||
analyzer-name = "dep"
|
analyzer-name = "dep"
|
||||||
analyzer-version = 1
|
analyzer-version = 1
|
||||||
inputs-digest = "2d7b9c5c88a94f3384b0cd754d35a3d7822a5858f439aaafe8c6477fb7c24f63"
|
inputs-digest = "641a04f7f89572adf1ebd051d0839eb5b03fdc67bb50838bfac98832246636f0"
|
||||||
solver-name = "gps-cdcl"
|
solver-name = "gps-cdcl"
|
||||||
solver-version = 1
|
solver-version = 1
|
||||||
|
@ -32,3 +32,7 @@
|
|||||||
[[constraint]]
|
[[constraint]]
|
||||||
branch = "master"
|
branch = "master"
|
||||||
name = "github.com/lib/pq"
|
name = "github.com/lib/pq"
|
||||||
|
|
||||||
|
[[constraint]]
|
||||||
|
name = "github.com/spf13/cobra"
|
||||||
|
version = "0.0.1"
|
||||||
|
51
Makefile
Normal file
51
Makefile
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
HOST_NAME =
|
||||||
|
PORT =
|
||||||
|
NAME =
|
||||||
|
CONNECT_STRING=postgresql://$(HOST_NAME):$(PORT)/$(NAME)?sslmode=disable
|
||||||
|
|
||||||
|
$(MATTESMIGRATE):
|
||||||
|
go get -u -d github.com/mattes/migrate/cli github.com/lib/pq
|
||||||
|
go build -tags 'postgres' -o /usr/local/bin/migrate github.com/mattes/migrate/cli
|
||||||
|
|
||||||
|
$(DEP):
|
||||||
|
go get -u github.com/golang/dep/cmd/dep
|
||||||
|
|
||||||
|
$(GINKGO):
|
||||||
|
go get -u github.com/onsi/ginkgo/ginkgo
|
||||||
|
|
||||||
|
checkdbvars:
|
||||||
|
test -n "$(HOST_NAME)" # $$HOST_NAME
|
||||||
|
test -n "$(PORT)" # $$PORT
|
||||||
|
test -n "$(NAME)" # $$NAME
|
||||||
|
|
||||||
|
rollback: checkdbvars
|
||||||
|
migrate -database $(CONNECT_STRING) -path ./db/migrations down 1
|
||||||
|
pg_dump -O -s $(CONNECT_STRING) > db/schema.sql
|
||||||
|
|
||||||
|
migrate: $(MATTESMIGRATE) checkdbvars
|
||||||
|
migrate -database $(CONNECT_STRING) -path ./db/migrations up
|
||||||
|
pg_dump -O -s $(CONNECT_STRING) > db/schema.sql
|
||||||
|
|
||||||
|
import:
|
||||||
|
test -n "$(NAME)" # $$NAME
|
||||||
|
psql $(NAME) < db/schema.sql
|
||||||
|
|
||||||
|
dep: $(DEP)
|
||||||
|
dep ensure
|
||||||
|
|
||||||
|
build: dep
|
||||||
|
go build
|
||||||
|
|
||||||
|
test: $(GINKGO)
|
||||||
|
ginkgo -r
|
||||||
|
|
||||||
|
createprivate:
|
||||||
|
#!/bin/bash
|
||||||
|
echo "Deleting test blockchain"
|
||||||
|
rm -rf test_data_dir
|
||||||
|
echo "Creating test blockchain with a new account"
|
||||||
|
mkdir test_data_dir
|
||||||
|
geth --dev --datadir test_data_dir --password .private_blockchain_password account new
|
||||||
|
|
||||||
|
startprivate: createprivate
|
||||||
|
geth --datadir test_data_dir --dev --nodiscover --mine --minerthreads 1 --maxpeers 0 --verbosity 3 --unlock 0 --password .private_blockchain_password --rpc
|
118
README.md
118
README.md
@ -1,109 +1,54 @@
|
|||||||
# Vulcanize DB
|
# Vulcanize DB
|
||||||
|
|
||||||
[![Build Status](https://travis-ci.com/vulcanize/vulcanizedb.svg?token=GKv2Y33qsFnfYgejjvYx&branch=master)](https://travis-ci.com/vulcanize/vulcanizedb)
|
[![Build Status](https://travis-ci.com/8thlight/vulcanizedb.svg?token=3psFYN2533rYjhRbvjte&branch=master)](https://travis-ci.com/8thlight/vulcanizedb)
|
||||||
|
|
||||||
## Development Setup
|
|
||||||
|
|
||||||
### Dependencies
|
### Dependencies
|
||||||
|
|
||||||
- Go 1.9+
|
- Go 1.9+
|
||||||
- https://github.com/golang/dep
|
|
||||||
- `go get -u github.com/golang/dep/cmd/dep`
|
|
||||||
- https://github.com/go-godo/godo
|
|
||||||
- `go get -u gopkg.in/godo.v2/cmd/godo`
|
|
||||||
- Postgres 10
|
- Postgres 10
|
||||||
- Go Ethereum
|
- Go Ethereum
|
||||||
- https://ethereum.github.io/go-ethereum/downloads/
|
- https://ethereum.github.io/go-ethereum/downloads/
|
||||||
|
|
||||||
|
### Installation
|
||||||
|
go get github.com/vulcanize/vulcanizedb
|
||||||
|
|
||||||
### Cloning the Repository
|
|
||||||
|
|
||||||
1. `git config --global url."git@github.com:".insteadOf "https://github.com/"`
|
|
||||||
- By default, `go get` does not work for private GitHub repos. This will fix that.
|
|
||||||
2. `go get github.com/vulcanize/vulcanizedb`
|
|
||||||
3. `cd $GOPATH/src/github.com/vulcanize/vulcanizedb`
|
|
||||||
4. `dep ensure`
|
|
||||||
|
|
||||||
### Setting up the Databases
|
### Setting up the Databases
|
||||||
|
|
||||||
1. Install Postgres
|
1. Install Postgres
|
||||||
2. Create a superuser for yourself and make sure `psql --list` works without prompting for a password.
|
2. Create a superuser for yourself and make sure `psql --list` works without prompting for a password.
|
||||||
3. `go get -u -d github.com/mattes/migrate/cli github.com/lib/pq`
|
3. `createdb vulcanize_private`
|
||||||
4. `go build -tags 'postgres' -o /usr/local/bin/migrate github.com/mattes/migrate/cli`
|
4. `cd $GOPATH/src/github.com/vulcanize/vulcanizedb`
|
||||||
5. `createdb vulcanize_private`
|
5. Import the schema: `psql vulcanize_private < db/schema.sql`
|
||||||
6. `cd $GOPATH/src/github.com/vulcanize/vulcanizedb`
|
|
||||||
7. `godo migrate -- --environment=<some-environment>`
|
or run the migrations: `make migrate HOST_NAME=localhost NAME=vulcanize_public PORT=5432`
|
||||||
* See below for configuring additional environments
|
* See below for configuring additional environments
|
||||||
|
|
||||||
Adding a new migration: `./scripts/create_migration <migration-name>`
|
|
||||||
|
|
||||||
### Creating/Using a Private Blockchain
|
|
||||||
|
|
||||||
Syncing the public blockchain takes many hours for the initial sync and will download 20+ GB of data.
|
|
||||||
Here are some instructions for creating a private blockchain that does not depend on having a network connection.
|
|
||||||
|
|
||||||
1. Run `./scripts/setup` to create a private blockchain with a new account.
|
|
||||||
* This will result in a warning.
|
|
||||||
2. Run `./scripts/start_private_blockchain`.
|
|
||||||
3. Run `godo run -- --environment=private` to start listener.
|
|
||||||
|
|
||||||
### Connecting to the Public Blockchain
|
|
||||||
|
|
||||||
`./scripts/start_blockchain`
|
|
||||||
|
|
||||||
### IPC File Paths
|
### IPC File Paths
|
||||||
|
|
||||||
The default location for Ethereum is:
|
The default location for Ethereum is:
|
||||||
- `$HOME/Library/Ethereum` for Mac
|
- `$HOME/Library/Ethereum` for Mac
|
||||||
- `$HOME/.ethereum` for Ubuntu
|
- `$HOME/.ethereum` for Ubuntu
|
||||||
- `$GOPATH/src/gihub.com/vulcanize/vulcanizedb/test_data_dir/geth.ipc` for private blockchain.
|
- `$GOPATH/src/gihub.com/vulcanize/vulcanizedb/test_data_dir/geth.ipc` for private node.
|
||||||
|
|
||||||
**Note the location of the ipc file is outputted when you connect to a blockchain. It is needed to for configuration**
|
**Note the location of the ipc file is printed to the console when you start geth. It is needed to for configuration**
|
||||||
|
|
||||||
## Start Vulcanize DB
|
## Start syncing with postgres
|
||||||
1. Start a blockchain.
|
1. Start geth node (**if fast syncing wait for geth to finsh initial sync**)
|
||||||
2. In a separate terminal start vulcanize_db
|
2. In a separate terminal start vulcanize_db
|
||||||
- `godo vulcanizeDb -- --environment=<some-environment>`
|
- `vulcanizedb sync --config <config.toml> --starting-block-number <block-number>`
|
||||||
|
|
||||||
## Running Listener
|
|
||||||
|
|
||||||
1. Start a blockchain.
|
|
||||||
2. In a separate terminal start listener (ipcDir location)
|
|
||||||
- `godo run -- --environment=<some-environment>`
|
|
||||||
|
|
||||||
## Retrieving Historical Data
|
* see `./environments` for example config
|
||||||
|
|
||||||
1. Start a blockchain.
|
|
||||||
2. In a separate terminal start listener (ipcDir location)
|
|
||||||
- `godo populateBlocks -- --environment=<some-environment> --starting-number=<starting-block-number>`
|
|
||||||
|
|
||||||
## Retrieve Contract Attributes
|
|
||||||
|
|
||||||
1. Add contract ABI to contracts / environment directory:
|
|
||||||
```
|
|
||||||
vulcanizedb/
|
|
||||||
contracts/
|
|
||||||
public/
|
|
||||||
<contract-address>.json
|
|
||||||
private/
|
|
||||||
```
|
|
||||||
The name of the JSON file should correspond the contract's address.
|
|
||||||
2. Start watching the contract `godo watchContract -- --environment=<some-environment> --contract-hash=<contract-address>`
|
|
||||||
3. Request summary data `godo showContractSummary -- --environment=<some-environment> --contract-hash=<contract-address>`
|
|
||||||
|
|
||||||
|
|
||||||
## Retrieving Contract Logs
|
|
||||||
|
|
||||||
1. Get the logs for a specific contract
|
|
||||||
- `godo getLogs -- --environment=<some-environment> --contract-hash=<contract-address>`
|
|
||||||
|
|
||||||
### Configuring Additional Environments
|
|
||||||
|
|
||||||
You can create configuration files for additional environments.
|
|
||||||
|
|
||||||
* Among other things, it will require the IPC file path
|
|
||||||
* See `environments/private.toml` for an example
|
|
||||||
* You will need to do this if you want to run a node connecting to the public blockchain
|
|
||||||
|
|
||||||
|
## Watch specific events
|
||||||
|
1. Start geth
|
||||||
|
2. In a separate terminal start vulcanize_db
|
||||||
|
- `vulcanizedb sync --config <config.toml> --starting-block-number <block-number>`
|
||||||
|
3. Create event filter
|
||||||
|
- `vulcanizedb addFilter --config <config.toml> --filter-filepath <filter.json>`
|
||||||
|
* see `./filters` for example filter
|
||||||
|
4. The filters are tracked in the `log_filters` table and the filtered events
|
||||||
|
will show up in the `watched_log_events` view
|
||||||
|
|
||||||
## Running the Tests
|
## Running the Tests
|
||||||
|
|
||||||
### Unit Tests
|
### Unit Tests
|
||||||
@ -112,7 +57,12 @@ You can create configuration files for additional environments.
|
|||||||
|
|
||||||
### Integration Test
|
### Integration Test
|
||||||
|
|
||||||
In order to run the integration tests, you will need to run them against a real blockchain. At the moment the integration tests require [Geth v1.7.2](https://ethereum.github.io/go-ethereum/downloads/) as they depend on the `--dev` mode, which changed in v1.7.3
|
In order to run the integration tests, you will need to run them against a real node. At the moment the integration tests require [Geth v1.7.2](https://ethereum.github.io/go-ethereum/downloads/) as they depend on the `--dev` mode, which changed in v1.7.3
|
||||||
|
|
||||||
1. Run `./scripts/start_private_blockchain` as a separate process.
|
1. Run `make startprivate` in a separate terminal
|
||||||
2. `go test ./...` to run all tests.
|
2. Setup a test database and import the schema:
|
||||||
|
|
||||||
|
`createdb vulcanize_private`
|
||||||
|
|
||||||
|
`psql vulcanize_private < db/schema.sql`
|
||||||
|
3. `go test ./...` to run all tests.
|
||||||
|
75
cmd/addFilter.go
Normal file
75
cmd/addFilter.go
Normal file
@ -0,0 +1,75 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"io/ioutil"
|
||||||
|
"log"
|
||||||
|
|
||||||
|
"github.com/vulcanize/vulcanizedb/pkg/filters"
|
||||||
|
"github.com/vulcanize/vulcanizedb/pkg/geth"
|
||||||
|
"github.com/vulcanize/vulcanizedb/utils"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
// addFilterCmd represents the addFilter command
|
||||||
|
var addFilterCmd = &cobra.Command{
|
||||||
|
Use: "addFilter",
|
||||||
|
Short: "Adds event filter to vulcanizedb",
|
||||||
|
Long: `An event filter is added to the vulcanize_db.
|
||||||
|
All events matching the filter conitions will be tracked
|
||||||
|
in vulcanizedb.
|
||||||
|
|
||||||
|
vulcanizedb addFilter --config config.toml --filter-filepath filter.json
|
||||||
|
|
||||||
|
The event filters are expected to match
|
||||||
|
the format described in the ethereum RPC wiki:
|
||||||
|
|
||||||
|
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_newfilter
|
||||||
|
|
||||||
|
[{
|
||||||
|
"fromBlock": "0x1",
|
||||||
|
"toBlock": "0x2",
|
||||||
|
"address": "0x8888f1f195afa192cfee860698584c030f4c9db1",
|
||||||
|
"topics": ["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b",
|
||||||
|
null,
|
||||||
|
"0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b",
|
||||||
|
"0x0000000000000000000000000aff3454fce5edbc8cca8697c15331677e6ebccc"]
|
||||||
|
}]
|
||||||
|
`,
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
addFilter()
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
var filterFilepath string
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
rootCmd.AddCommand(addFilterCmd)
|
||||||
|
|
||||||
|
addFilterCmd.PersistentFlags().StringVar(&filterFilepath, "filter-filepath", "", "path/to/filter.json")
|
||||||
|
addFilterCmd.MarkFlagRequired("filter-filepath")
|
||||||
|
}
|
||||||
|
|
||||||
|
func addFilter() {
|
||||||
|
if filterFilepath == "" {
|
||||||
|
log.Fatal("filter-filepath required")
|
||||||
|
}
|
||||||
|
var logFilters filters.LogFilters
|
||||||
|
blockchain := geth.NewBlockchain(ipc)
|
||||||
|
repository := utils.LoadPostgres(databaseConfig, blockchain.Node())
|
||||||
|
absFilePath := utils.AbsFilePath(filterFilepath)
|
||||||
|
logFilterBytes, err := ioutil.ReadFile(absFilePath)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
err = json.Unmarshal(logFilterBytes, &logFilters)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
for _, filter := range logFilters {
|
||||||
|
err = repository.AddFilter(filter)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -1,71 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"log"
|
|
||||||
|
|
||||||
"flag"
|
|
||||||
|
|
||||||
"math/big"
|
|
||||||
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/vulcanize/vulcanizedb/cmd"
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/core"
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/geth"
|
|
||||||
)
|
|
||||||
|
|
||||||
func min(a, b int64) int64 {
|
|
||||||
if a < b {
|
|
||||||
return a
|
|
||||||
}
|
|
||||||
return b
|
|
||||||
}
|
|
||||||
|
|
||||||
const (
|
|
||||||
windowSize = 24
|
|
||||||
pollingInterval = 10 * time.Second
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
environment := flag.String("environment", "", "Environment name")
|
|
||||||
contractHash := flag.String("contract-hash", "", "Contract hash to show summary")
|
|
||||||
ticker := time.NewTicker(pollingInterval)
|
|
||||||
defer ticker.Stop()
|
|
||||||
|
|
||||||
flag.Parse()
|
|
||||||
|
|
||||||
config := cmd.LoadConfig(*environment)
|
|
||||||
blockchain := geth.NewBlockchain(config.Client.IPCPath)
|
|
||||||
repository := cmd.LoadPostgres(config.Database, blockchain.Node())
|
|
||||||
|
|
||||||
lastBlockNumber := blockchain.LastBlock().Int64()
|
|
||||||
stepSize := int64(1000)
|
|
||||||
|
|
||||||
go func() {
|
|
||||||
for i := int64(0); i < lastBlockNumber; i = min(i+stepSize, lastBlockNumber) {
|
|
||||||
logs, err := blockchain.GetLogs(core.Contract{Hash: *contractHash}, big.NewInt(i), big.NewInt(i+stepSize))
|
|
||||||
log.Println("Backfilling Logs:", i)
|
|
||||||
if err != nil {
|
|
||||||
log.Println(err)
|
|
||||||
}
|
|
||||||
repository.CreateLogs(logs)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
done := make(chan struct{})
|
|
||||||
go func() { done <- struct{}{} }()
|
|
||||||
for range ticker.C {
|
|
||||||
select {
|
|
||||||
case <-done:
|
|
||||||
go func() {
|
|
||||||
z := &big.Int{}
|
|
||||||
z.Sub(blockchain.LastBlock(), big.NewInt(25))
|
|
||||||
log.Printf("Logs Window: %d - %d", z.Int64(), blockchain.LastBlock().Int64())
|
|
||||||
logs, _ := blockchain.GetLogs(core.Contract{Hash: *contractHash}, z, blockchain.LastBlock())
|
|
||||||
repository.CreateLogs(logs)
|
|
||||||
done <- struct{}{}
|
|
||||||
}()
|
|
||||||
default:
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,22 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"flag"
|
|
||||||
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"github.com/vulcanize/vulcanizedb/cmd"
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/geth"
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/history"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
environment := flag.String("environment", "", "Environment name")
|
|
||||||
startingBlockNumber := flag.Int("starting-number", -1, "First block to fill from")
|
|
||||||
flag.Parse()
|
|
||||||
config := cmd.LoadConfig(*environment)
|
|
||||||
blockchain := geth.NewBlockchain(config.Client.IPCPath)
|
|
||||||
repository := cmd.LoadPostgres(config.Database, blockchain.Node())
|
|
||||||
numberOfBlocksCreated := history.PopulateMissingBlocks(blockchain, repository, int64(*startingBlockNumber))
|
|
||||||
fmt.Printf("Populated %d blocks", numberOfBlocksCreated)
|
|
||||||
}
|
|
74
cmd/root.go
Normal file
74
cmd/root.go
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/vulcanize/vulcanizedb/pkg/config"
|
||||||
|
"github.com/mitchellh/go-homedir"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
"github.com/spf13/viper"
|
||||||
|
)
|
||||||
|
|
||||||
|
var cfgFile string
|
||||||
|
var databaseConfig config.Database
|
||||||
|
var ipc string
|
||||||
|
|
||||||
|
var rootCmd = &cobra.Command{
|
||||||
|
Use: "vulcanizedb",
|
||||||
|
PersistentPreRun: database,
|
||||||
|
}
|
||||||
|
|
||||||
|
func Execute() {
|
||||||
|
if err := rootCmd.Execute(); err != nil {
|
||||||
|
fmt.Println(err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func database(cmd *cobra.Command, args []string) {
|
||||||
|
ipc = viper.GetString("client.ipcpath")
|
||||||
|
databaseConfig = config.Database{
|
||||||
|
Name: viper.GetString("database.name"),
|
||||||
|
Hostname: viper.GetString("database.hostname"),
|
||||||
|
Port: viper.GetInt("database.port"),
|
||||||
|
}
|
||||||
|
viper.Set("database.config", databaseConfig)
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
cobra.OnInitialize(initConfig)
|
||||||
|
|
||||||
|
rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "environment/public.toml", "config file location")
|
||||||
|
rootCmd.PersistentFlags().String("database-name", "vulcanize_public", "database name")
|
||||||
|
rootCmd.PersistentFlags().Int("database-port", 5432, "database port")
|
||||||
|
rootCmd.PersistentFlags().String("database-hostname", "localhost", "database hostname")
|
||||||
|
rootCmd.PersistentFlags().String("client-ipcPath", "", "location of geth.ipc file")
|
||||||
|
|
||||||
|
viper.BindPFlag("database.name", rootCmd.PersistentFlags().Lookup("database-name"))
|
||||||
|
viper.BindPFlag("database.port", rootCmd.PersistentFlags().Lookup("database-port"))
|
||||||
|
viper.BindPFlag("database.hostname", rootCmd.PersistentFlags().Lookup("database-hostname"))
|
||||||
|
viper.BindPFlag("client.ipcPath", rootCmd.PersistentFlags().Lookup("client-ipcPath"))
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
func initConfig() {
|
||||||
|
if cfgFile != "" {
|
||||||
|
viper.SetConfigFile(cfgFile)
|
||||||
|
} else {
|
||||||
|
home, err := homedir.Dir()
|
||||||
|
if err != nil {
|
||||||
|
fmt.Println(err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
viper.AddConfigPath(home)
|
||||||
|
viper.SetConfigName(".vulcanizedb")
|
||||||
|
}
|
||||||
|
|
||||||
|
viper.AutomaticEnv()
|
||||||
|
|
||||||
|
if err := viper.ReadInConfig(); err == nil {
|
||||||
|
fmt.Printf("Using config file: %s\n\n", viper.ConfigFileUsed())
|
||||||
|
}
|
||||||
|
}
|
@ -1,34 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"flag"
|
|
||||||
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"os"
|
|
||||||
|
|
||||||
"github.com/vulcanize/vulcanizedb/cmd"
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/geth"
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/history"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
pollingInterval = 7 * time.Second
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
ticker := time.NewTicker(pollingInterval)
|
|
||||||
defer ticker.Stop()
|
|
||||||
|
|
||||||
environment := flag.String("environment", "", "Environment name")
|
|
||||||
flag.Parse()
|
|
||||||
config := cmd.LoadConfig(*environment)
|
|
||||||
blockchain := geth.NewBlockchain(config.Client.IPCPath)
|
|
||||||
repository := cmd.LoadPostgres(config.Database, blockchain.Node())
|
|
||||||
validator := history.NewBlockValidator(blockchain, repository, 15)
|
|
||||||
|
|
||||||
for range ticker.C {
|
|
||||||
window := validator.ValidateBlocks()
|
|
||||||
validator.Log(os.Stdout, window)
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,31 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"flag"
|
|
||||||
|
|
||||||
"log"
|
|
||||||
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"github.com/vulcanize/vulcanizedb/cmd"
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/contract_summary"
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/geth"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
environment := flag.String("environment", "", "Environment name")
|
|
||||||
contractHash := flag.String("contract-hash", "", "Contract hash to show summary")
|
|
||||||
_blockNumber := flag.Int64("block-number", -1, "Block number of summary")
|
|
||||||
flag.Parse()
|
|
||||||
config := cmd.LoadConfig(*environment)
|
|
||||||
blockchain := geth.NewBlockchain(config.Client.IPCPath)
|
|
||||||
repository := cmd.LoadPostgres(config.Database, blockchain.Node())
|
|
||||||
blockNumber := cmd.RequestedBlockNumber(_blockNumber)
|
|
||||||
|
|
||||||
contractSummary, err := contract_summary.NewSummary(blockchain, repository, *contractHash, blockNumber)
|
|
||||||
if err != nil {
|
|
||||||
log.Fatalln(err)
|
|
||||||
}
|
|
||||||
output := contract_summary.GenerateConsoleOutput(contractSummary)
|
|
||||||
fmt.Println(output)
|
|
||||||
}
|
|
81
cmd/sync.go
Normal file
81
cmd/sync.go
Normal file
@ -0,0 +1,81 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/vulcanize/vulcanizedb/pkg/core"
|
||||||
|
"github.com/vulcanize/vulcanizedb/pkg/geth"
|
||||||
|
"github.com/vulcanize/vulcanizedb/pkg/history"
|
||||||
|
"github.com/vulcanize/vulcanizedb/pkg/repositories"
|
||||||
|
"github.com/vulcanize/vulcanizedb/utils"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
"log"
|
||||||
|
)
|
||||||
|
|
||||||
|
// syncCmd represents the sync command
|
||||||
|
var syncCmd = &cobra.Command{
|
||||||
|
Use: "sync",
|
||||||
|
Short: "Syncs vulcanizedb with local ethereum node",
|
||||||
|
Long: `Syncs vulcanizedb with local ethereum node.
|
||||||
|
vulcanizedb sync --startingBlockNumber 0 --config public.toml
|
||||||
|
|
||||||
|
Expects ethereum node to be running and requires a .toml config:
|
||||||
|
|
||||||
|
[database]
|
||||||
|
name = "vulcanize_public"
|
||||||
|
hostname = "localhost"
|
||||||
|
port = 5432
|
||||||
|
|
||||||
|
[client]
|
||||||
|
ipcPath = "/Users/mattkrump/Library/Ethereum/geth.ipc"
|
||||||
|
`,
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
sync()
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
const (
|
||||||
|
pollingInterval = 7 * time.Second
|
||||||
|
)
|
||||||
|
|
||||||
|
var startingBlockNumber int
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
rootCmd.AddCommand(syncCmd)
|
||||||
|
|
||||||
|
syncCmd.Flags().IntVarP(&startingBlockNumber, "starting-block-number", "s", 0, "Block number to start syncing from")
|
||||||
|
}
|
||||||
|
|
||||||
|
func backFillAllBlocks(blockchain core.Blockchain, repository repositories.Postgres, missingBlocksPopulated chan int, startingBlockNumber int64) {
|
||||||
|
go func() {
|
||||||
|
missingBlocksPopulated <- history.PopulateMissingBlocks(blockchain, repository, startingBlockNumber)
|
||||||
|
}()
|
||||||
|
}
|
||||||
|
|
||||||
|
func sync() {
|
||||||
|
ticker := time.NewTicker(pollingInterval)
|
||||||
|
defer ticker.Stop()
|
||||||
|
|
||||||
|
blockchain := geth.NewBlockchain(ipc)
|
||||||
|
if blockchain.LastBlock().Int64() == 0 {
|
||||||
|
log.Fatal("geth initial: state sync not finished")
|
||||||
|
}
|
||||||
|
repository := utils.LoadPostgres(databaseConfig, blockchain.Node())
|
||||||
|
validator := history.NewBlockValidator(blockchain, repository, 15)
|
||||||
|
|
||||||
|
missingBlocksPopulated := make(chan int)
|
||||||
|
_startingBlockNumber := int64(startingBlockNumber)
|
||||||
|
go backFillAllBlocks(blockchain, repository, missingBlocksPopulated, _startingBlockNumber)
|
||||||
|
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case <-ticker.C:
|
||||||
|
window := validator.ValidateBlocks()
|
||||||
|
validator.Log(os.Stdout, window)
|
||||||
|
case <-missingBlocksPopulated:
|
||||||
|
go backFillAllBlocks(blockchain, repository, missingBlocksPopulated, _startingBlockNumber)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -1,50 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"flag"
|
|
||||||
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"os"
|
|
||||||
|
|
||||||
"github.com/vulcanize/vulcanizedb/cmd"
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/core"
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/geth"
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/history"
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/repositories"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
pollingInterval = 7 * time.Second
|
|
||||||
)
|
|
||||||
|
|
||||||
func backFillAllBlocks(blockchain core.Blockchain, repository repositories.Postgres, missingBlocksPopulated chan int) {
|
|
||||||
go func() {
|
|
||||||
missingBlocksPopulated <- history.PopulateMissingBlocks(blockchain, repository, 0)
|
|
||||||
}()
|
|
||||||
}
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
ticker := time.NewTicker(pollingInterval)
|
|
||||||
defer ticker.Stop()
|
|
||||||
|
|
||||||
environment := flag.String("environment", "", "Environment name")
|
|
||||||
flag.Parse()
|
|
||||||
config := cmd.LoadConfig(*environment)
|
|
||||||
blockchain := geth.NewBlockchain(config.Client.IPCPath)
|
|
||||||
repository := cmd.LoadPostgres(config.Database, blockchain.Node())
|
|
||||||
validator := history.NewBlockValidator(blockchain, repository, 15)
|
|
||||||
|
|
||||||
missingBlocksPopulated := make(chan int)
|
|
||||||
go backFillAllBlocks(blockchain, repository, missingBlocksPopulated)
|
|
||||||
|
|
||||||
for {
|
|
||||||
select {
|
|
||||||
case <-ticker.C:
|
|
||||||
window := validator.ValidateBlocks()
|
|
||||||
validator.Log(os.Stdout, window)
|
|
||||||
case <-missingBlocksPopulated:
|
|
||||||
go backFillAllBlocks(blockchain, repository, missingBlocksPopulated)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,26 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"flag"
|
|
||||||
|
|
||||||
"github.com/vulcanize/vulcanizedb/cmd"
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/core"
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/geth"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
environment := flag.String("environment", "", "Environment name")
|
|
||||||
contractHash := flag.String("contract-hash", "", "contract-hash=x1234")
|
|
||||||
abiFilepath := flag.String("abi-filepath", "", "path/to/abifile.json")
|
|
||||||
flag.Parse()
|
|
||||||
|
|
||||||
contractAbiString := cmd.GetAbi(*abiFilepath, *contractHash)
|
|
||||||
config := cmd.LoadConfig(*environment)
|
|
||||||
blockchain := geth.NewBlockchain(config.Client.IPCPath)
|
|
||||||
repository := cmd.LoadPostgres(config.Database, blockchain.Node())
|
|
||||||
watchedContract := core.Contract{
|
|
||||||
Abi: contractAbiString,
|
|
||||||
Hash: *contractHash,
|
|
||||||
}
|
|
||||||
repository.CreateContract(watchedContract)
|
|
||||||
}
|
|
3
db/migrations/1515613181_add_fields_to_node.down.sql
Normal file
3
db/migrations/1515613181_add_fields_to_node.down.sql
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
ALTER TABLE nodes
|
||||||
|
DROP COLUMN node_id,
|
||||||
|
DROP COLUMN client_name;
|
3
db/migrations/1515613181_add_fields_to_node.up.sql
Normal file
3
db/migrations/1515613181_add_fields_to_node.up.sql
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
ALTER TABLE nodes
|
||||||
|
ADD COLUMN node_id VARCHAR(128),
|
||||||
|
ADD COLUMN client_name VARCHAR;
|
9
db/migrations/1515613715_update_node_index.down.sql
Normal file
9
db/migrations/1515613715_update_node_index.down.sql
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
BEGIN;
|
||||||
|
|
||||||
|
ALTER TABLE nodes
|
||||||
|
DROP CONSTRAINT node_uc;
|
||||||
|
|
||||||
|
ALTER TABLE nodes
|
||||||
|
ADD CONSTRAINT node_uc UNIQUE (genesis_block, network_id);
|
||||||
|
|
||||||
|
COMMIT;
|
9
db/migrations/1515613715_update_node_index.up.sql
Normal file
9
db/migrations/1515613715_update_node_index.up.sql
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
BEGIN;
|
||||||
|
|
||||||
|
ALTER TABLE nodes
|
||||||
|
DROP CONSTRAINT node_uc;
|
||||||
|
|
||||||
|
ALTER TABLE nodes
|
||||||
|
ADD CONSTRAINT node_uc UNIQUE (genesis_block, network_id, node_id);
|
||||||
|
|
||||||
|
COMMIT;
|
12
db/migrations/1516050071_add_log_fk_constraint.down.sql
Normal file
12
db/migrations/1516050071_add_log_fk_constraint.down.sql
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
BEGIN;
|
||||||
|
|
||||||
|
ALTER TABLE logs
|
||||||
|
DROP CONSTRAINT receipts_fk;
|
||||||
|
|
||||||
|
ALTER TABLE logs
|
||||||
|
DROP COLUMN receipt_id;
|
||||||
|
|
||||||
|
ALTER TABLE logs
|
||||||
|
ADD CONSTRAINT log_uc UNIQUE (block_number, index);
|
||||||
|
|
||||||
|
COMMIT;
|
14
db/migrations/1516050071_add_log_fk_constraint.up.sql
Normal file
14
db/migrations/1516050071_add_log_fk_constraint.up.sql
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
BEGIN;
|
||||||
|
ALTER TABLE logs
|
||||||
|
DROP CONSTRAINT log_uc;
|
||||||
|
|
||||||
|
ALTER TABLE logs
|
||||||
|
ADD COLUMN receipt_id INT;
|
||||||
|
|
||||||
|
ALTER TABLE logs
|
||||||
|
ADD CONSTRAINT receipts_fk
|
||||||
|
FOREIGN KEY (receipt_id)
|
||||||
|
REFERENCES receipts (id)
|
||||||
|
ON DELETE CASCADE;
|
||||||
|
|
||||||
|
COMMIT;
|
1
db/migrations/1516648743_add_log_filters.down.sql
Normal file
1
db/migrations/1516648743_add_log_filters.down.sql
Normal file
@ -0,0 +1 @@
|
|||||||
|
DROP TABLE log_filters;
|
12
db/migrations/1516648743_add_log_filters.up.sql
Normal file
12
db/migrations/1516648743_add_log_filters.up.sql
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
CREATE TABLE log_filters (
|
||||||
|
id SERIAL,
|
||||||
|
name VARCHAR NOT NULL CHECK (name <> ''),
|
||||||
|
from_block BIGINT CHECK (from_block >= 0),
|
||||||
|
to_block BIGINT CHECK (from_block >= 0),
|
||||||
|
address VARCHAR(66),
|
||||||
|
topic0 VARCHAR(66),
|
||||||
|
topic1 VARCHAR(66),
|
||||||
|
topic2 VARCHAR(66),
|
||||||
|
topic3 VARCHAR(66),
|
||||||
|
CONSTRAINT name_uc UNIQUE (name)
|
||||||
|
);
|
2
db/migrations/1516653373_add_watched_event_logs.down.sql
Normal file
2
db/migrations/1516653373_add_watched_event_logs.down.sql
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
DROP VIEW watched_event_logs;
|
||||||
|
DROP VIEW block_stats;
|
29
db/migrations/1516653373_add_watched_event_logs.up.sql
Normal file
29
db/migrations/1516653373_add_watched_event_logs.up.sql
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
CREATE VIEW block_stats AS
|
||||||
|
SELECT
|
||||||
|
max(block_number) AS max_block,
|
||||||
|
min(block_number) AS min_block
|
||||||
|
FROM logs;
|
||||||
|
|
||||||
|
CREATE VIEW watched_event_logs AS
|
||||||
|
SELECT
|
||||||
|
log_filters.name,
|
||||||
|
logs.id,
|
||||||
|
block_number,
|
||||||
|
logs.address,
|
||||||
|
tx_hash,
|
||||||
|
index,
|
||||||
|
logs.topic0,
|
||||||
|
logs.topic1,
|
||||||
|
logs.topic2,
|
||||||
|
logs.topic3,
|
||||||
|
data,
|
||||||
|
receipt_id
|
||||||
|
FROM log_filters
|
||||||
|
CROSS JOIN block_stats
|
||||||
|
JOIN logs ON logs.address = log_filters.address
|
||||||
|
AND logs.block_number >= coalesce(log_filters.from_block, block_stats.min_block)
|
||||||
|
AND logs.block_number <= coalesce(log_filters.to_block, block_stats.max_block)
|
||||||
|
WHERE (log_filters.topic0 = logs.topic0 OR log_filters.topic0 ISNULL)
|
||||||
|
AND (log_filters.topic1 = logs.topic1 OR log_filters.topic1 ISNULL)
|
||||||
|
AND (log_filters.topic2 = logs.topic2 OR log_filters.topic2 ISNULL)
|
||||||
|
AND (log_filters.topic3 = logs.topic3 OR log_filters.topic3 ISNULL);
|
556
db/schema.sql
Normal file
556
db/schema.sql
Normal file
@ -0,0 +1,556 @@
|
|||||||
|
--
|
||||||
|
-- PostgreSQL database dump
|
||||||
|
--
|
||||||
|
|
||||||
|
-- Dumped from database version 10.1
|
||||||
|
-- Dumped by pg_dump version 10.1
|
||||||
|
|
||||||
|
SET statement_timeout = 0;
|
||||||
|
SET lock_timeout = 0;
|
||||||
|
SET idle_in_transaction_session_timeout = 0;
|
||||||
|
SET client_encoding = 'UTF8';
|
||||||
|
SET standard_conforming_strings = on;
|
||||||
|
SET check_function_bodies = false;
|
||||||
|
SET client_min_messages = warning;
|
||||||
|
SET row_security = off;
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: plpgsql; Type: EXTENSION; Schema: -; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: EXTENSION plpgsql; Type: COMMENT; Schema: -; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language';
|
||||||
|
|
||||||
|
|
||||||
|
SET search_path = public, pg_catalog;
|
||||||
|
|
||||||
|
SET default_tablespace = '';
|
||||||
|
|
||||||
|
SET default_with_oids = false;
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: logs; Type: TABLE; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE TABLE logs (
|
||||||
|
id integer NOT NULL,
|
||||||
|
block_number bigint,
|
||||||
|
address character varying(66),
|
||||||
|
tx_hash character varying(66),
|
||||||
|
index bigint,
|
||||||
|
topic0 character varying(66),
|
||||||
|
topic1 character varying(66),
|
||||||
|
topic2 character varying(66),
|
||||||
|
topic3 character varying(66),
|
||||||
|
data text,
|
||||||
|
receipt_id integer
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: block_stats; Type: VIEW; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE VIEW block_stats AS
|
||||||
|
SELECT max(logs.block_number) AS max_block,
|
||||||
|
min(logs.block_number) AS min_block
|
||||||
|
FROM logs;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: blocks; Type: TABLE; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE TABLE blocks (
|
||||||
|
block_number bigint,
|
||||||
|
block_gaslimit double precision,
|
||||||
|
block_gasused double precision,
|
||||||
|
block_time double precision,
|
||||||
|
id integer NOT NULL,
|
||||||
|
block_difficulty bigint,
|
||||||
|
block_hash character varying(66),
|
||||||
|
block_nonce character varying(20),
|
||||||
|
block_parenthash character varying(66),
|
||||||
|
block_size bigint,
|
||||||
|
uncle_hash character varying(66),
|
||||||
|
node_id integer NOT NULL,
|
||||||
|
is_final boolean,
|
||||||
|
block_miner character varying(42),
|
||||||
|
block_extra_data character varying,
|
||||||
|
block_reward numeric,
|
||||||
|
block_uncles_reward numeric
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: blocks_id_seq; Type: SEQUENCE; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE SEQUENCE blocks_id_seq
|
||||||
|
AS integer
|
||||||
|
START WITH 1
|
||||||
|
INCREMENT BY 1
|
||||||
|
NO MINVALUE
|
||||||
|
NO MAXVALUE
|
||||||
|
CACHE 1;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: blocks_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER SEQUENCE blocks_id_seq OWNED BY blocks.id;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: log_filters; Type: TABLE; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE TABLE log_filters (
|
||||||
|
id integer NOT NULL,
|
||||||
|
name character varying NOT NULL,
|
||||||
|
from_block bigint,
|
||||||
|
to_block bigint,
|
||||||
|
address character varying(66),
|
||||||
|
topic0 character varying(66),
|
||||||
|
topic1 character varying(66),
|
||||||
|
topic2 character varying(66),
|
||||||
|
topic3 character varying(66),
|
||||||
|
CONSTRAINT log_filters_from_block_check CHECK ((from_block >= 0)),
|
||||||
|
CONSTRAINT log_filters_from_block_check1 CHECK ((from_block >= 0)),
|
||||||
|
CONSTRAINT log_filters_name_check CHECK (((name)::text <> ''::text))
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: log_filters_id_seq; Type: SEQUENCE; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE SEQUENCE log_filters_id_seq
|
||||||
|
AS integer
|
||||||
|
START WITH 1
|
||||||
|
INCREMENT BY 1
|
||||||
|
NO MINVALUE
|
||||||
|
NO MAXVALUE
|
||||||
|
CACHE 1;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: log_filters_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER SEQUENCE log_filters_id_seq OWNED BY log_filters.id;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: logs_id_seq; Type: SEQUENCE; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE SEQUENCE logs_id_seq
|
||||||
|
AS integer
|
||||||
|
START WITH 1
|
||||||
|
INCREMENT BY 1
|
||||||
|
NO MINVALUE
|
||||||
|
NO MAXVALUE
|
||||||
|
CACHE 1;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: logs_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER SEQUENCE logs_id_seq OWNED BY logs.id;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: nodes; Type: TABLE; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE TABLE nodes (
|
||||||
|
id integer NOT NULL,
|
||||||
|
genesis_block character varying(66),
|
||||||
|
network_id numeric,
|
||||||
|
node_id character varying(128),
|
||||||
|
client_name character varying
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: nodes_id_seq; Type: SEQUENCE; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE SEQUENCE nodes_id_seq
|
||||||
|
AS integer
|
||||||
|
START WITH 1
|
||||||
|
INCREMENT BY 1
|
||||||
|
NO MINVALUE
|
||||||
|
NO MAXVALUE
|
||||||
|
CACHE 1;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: nodes_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER SEQUENCE nodes_id_seq OWNED BY nodes.id;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: receipts; Type: TABLE; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE TABLE receipts (
|
||||||
|
id integer NOT NULL,
|
||||||
|
transaction_id integer NOT NULL,
|
||||||
|
contract_address character varying(42),
|
||||||
|
cumulative_gas_used numeric,
|
||||||
|
gas_used numeric,
|
||||||
|
state_root character varying(66),
|
||||||
|
status integer,
|
||||||
|
tx_hash character varying(66)
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: receipts_id_seq; Type: SEQUENCE; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE SEQUENCE receipts_id_seq
|
||||||
|
AS integer
|
||||||
|
START WITH 1
|
||||||
|
INCREMENT BY 1
|
||||||
|
NO MINVALUE
|
||||||
|
NO MAXVALUE
|
||||||
|
CACHE 1;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: receipts_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER SEQUENCE receipts_id_seq OWNED BY receipts.id;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: schema_migrations; Type: TABLE; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE TABLE schema_migrations (
|
||||||
|
version bigint NOT NULL,
|
||||||
|
dirty boolean NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: transactions; Type: TABLE; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE TABLE transactions (
|
||||||
|
id integer NOT NULL,
|
||||||
|
tx_hash character varying(66),
|
||||||
|
tx_nonce numeric,
|
||||||
|
tx_to character varying(66),
|
||||||
|
tx_gaslimit numeric,
|
||||||
|
tx_gasprice numeric,
|
||||||
|
tx_value numeric,
|
||||||
|
block_id integer NOT NULL,
|
||||||
|
tx_from character varying(66),
|
||||||
|
tx_input_data character varying
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: transactions_id_seq; Type: SEQUENCE; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE SEQUENCE transactions_id_seq
|
||||||
|
AS integer
|
||||||
|
START WITH 1
|
||||||
|
INCREMENT BY 1
|
||||||
|
NO MINVALUE
|
||||||
|
NO MAXVALUE
|
||||||
|
CACHE 1;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: transactions_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER SEQUENCE transactions_id_seq OWNED BY transactions.id;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: watched_contracts; Type: TABLE; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE TABLE watched_contracts (
|
||||||
|
contract_id integer NOT NULL,
|
||||||
|
contract_hash character varying(66),
|
||||||
|
contract_abi json
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: watched_contracts_contract_id_seq; Type: SEQUENCE; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE SEQUENCE watched_contracts_contract_id_seq
|
||||||
|
AS integer
|
||||||
|
START WITH 1
|
||||||
|
INCREMENT BY 1
|
||||||
|
NO MINVALUE
|
||||||
|
NO MAXVALUE
|
||||||
|
CACHE 1;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: watched_contracts_contract_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER SEQUENCE watched_contracts_contract_id_seq OWNED BY watched_contracts.contract_id;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: watched_event_logs; Type: VIEW; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE VIEW watched_event_logs AS
|
||||||
|
SELECT log_filters.name,
|
||||||
|
logs.id,
|
||||||
|
logs.block_number,
|
||||||
|
logs.address,
|
||||||
|
logs.tx_hash,
|
||||||
|
logs.index,
|
||||||
|
logs.topic0,
|
||||||
|
logs.topic1,
|
||||||
|
logs.topic2,
|
||||||
|
logs.topic3,
|
||||||
|
logs.data,
|
||||||
|
logs.receipt_id
|
||||||
|
FROM ((log_filters
|
||||||
|
CROSS JOIN block_stats)
|
||||||
|
JOIN logs ON ((((logs.address)::text = (log_filters.address)::text) AND (logs.block_number >= COALESCE(log_filters.from_block, block_stats.min_block)) AND (logs.block_number <= COALESCE(log_filters.to_block, block_stats.max_block)))))
|
||||||
|
WHERE ((((log_filters.topic0)::text = (logs.topic0)::text) OR (log_filters.topic0 IS NULL)) AND (((log_filters.topic1)::text = (logs.topic1)::text) OR (log_filters.topic1 IS NULL)) AND (((log_filters.topic2)::text = (logs.topic2)::text) OR (log_filters.topic2 IS NULL)) AND (((log_filters.topic3)::text = (logs.topic3)::text) OR (log_filters.topic3 IS NULL)));
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: blocks id; Type: DEFAULT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY blocks ALTER COLUMN id SET DEFAULT nextval('blocks_id_seq'::regclass);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: log_filters id; Type: DEFAULT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY log_filters ALTER COLUMN id SET DEFAULT nextval('log_filters_id_seq'::regclass);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: logs id; Type: DEFAULT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY logs ALTER COLUMN id SET DEFAULT nextval('logs_id_seq'::regclass);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: nodes id; Type: DEFAULT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY nodes ALTER COLUMN id SET DEFAULT nextval('nodes_id_seq'::regclass);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: receipts id; Type: DEFAULT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY receipts ALTER COLUMN id SET DEFAULT nextval('receipts_id_seq'::regclass);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: transactions id; Type: DEFAULT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY transactions ALTER COLUMN id SET DEFAULT nextval('transactions_id_seq'::regclass);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: watched_contracts contract_id; Type: DEFAULT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY watched_contracts ALTER COLUMN contract_id SET DEFAULT nextval('watched_contracts_contract_id_seq'::regclass);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: blocks blocks_pkey; Type: CONSTRAINT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY blocks
|
||||||
|
ADD CONSTRAINT blocks_pkey PRIMARY KEY (id);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: watched_contracts contract_hash_uc; Type: CONSTRAINT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY watched_contracts
|
||||||
|
ADD CONSTRAINT contract_hash_uc UNIQUE (contract_hash);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: logs logs_pkey; Type: CONSTRAINT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY logs
|
||||||
|
ADD CONSTRAINT logs_pkey PRIMARY KEY (id);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: log_filters name_uc; Type: CONSTRAINT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY log_filters
|
||||||
|
ADD CONSTRAINT name_uc UNIQUE (name);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: blocks node_id_block_number_uc; Type: CONSTRAINT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY blocks
|
||||||
|
ADD CONSTRAINT node_id_block_number_uc UNIQUE (block_number, node_id);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: nodes node_uc; Type: CONSTRAINT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY nodes
|
||||||
|
ADD CONSTRAINT node_uc UNIQUE (genesis_block, network_id, node_id);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: nodes nodes_pkey; Type: CONSTRAINT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY nodes
|
||||||
|
ADD CONSTRAINT nodes_pkey PRIMARY KEY (id);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: receipts receipts_pkey; Type: CONSTRAINT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY receipts
|
||||||
|
ADD CONSTRAINT receipts_pkey PRIMARY KEY (id);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: schema_migrations schema_migrations_pkey; Type: CONSTRAINT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY schema_migrations
|
||||||
|
ADD CONSTRAINT schema_migrations_pkey PRIMARY KEY (version);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: transactions transactions_pkey; Type: CONSTRAINT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY transactions
|
||||||
|
ADD CONSTRAINT transactions_pkey PRIMARY KEY (id);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: watched_contracts watched_contracts_pkey; Type: CONSTRAINT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY watched_contracts
|
||||||
|
ADD CONSTRAINT watched_contracts_pkey PRIMARY KEY (contract_id);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: block_id_index; Type: INDEX; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE INDEX block_id_index ON transactions USING btree (block_id);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: block_number_index; Type: INDEX; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE INDEX block_number_index ON blocks USING btree (block_number);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: node_id_index; Type: INDEX; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE INDEX node_id_index ON blocks USING btree (node_id);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: transaction_id_index; Type: INDEX; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE INDEX transaction_id_index ON receipts USING btree (transaction_id);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: tx_from_index; Type: INDEX; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE INDEX tx_from_index ON transactions USING btree (tx_from);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: tx_to_index; Type: INDEX; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
CREATE INDEX tx_to_index ON transactions USING btree (tx_to);
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: transactions blocks_fk; Type: FK CONSTRAINT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY transactions
|
||||||
|
ADD CONSTRAINT blocks_fk FOREIGN KEY (block_id) REFERENCES blocks(id) ON DELETE CASCADE;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: blocks node_fk; Type: FK CONSTRAINT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY blocks
|
||||||
|
ADD CONSTRAINT node_fk FOREIGN KEY (node_id) REFERENCES nodes(id) ON DELETE CASCADE;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: logs receipts_fk; Type: FK CONSTRAINT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY logs
|
||||||
|
ADD CONSTRAINT receipts_fk FOREIGN KEY (receipt_id) REFERENCES receipts(id) ON DELETE CASCADE;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- Name: receipts transaction_fk; Type: FK CONSTRAINT; Schema: public; Owner: -
|
||||||
|
--
|
||||||
|
|
||||||
|
ALTER TABLE ONLY receipts
|
||||||
|
ADD CONSTRAINT transaction_fk FOREIGN KEY (transaction_id) REFERENCES transactions(id) ON DELETE CASCADE;
|
||||||
|
|
||||||
|
|
||||||
|
--
|
||||||
|
-- PostgreSQL database dump complete
|
||||||
|
--
|
||||||
|
|
15
filters/example-filter.json
Normal file
15
filters/example-filter.json
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
[{
|
||||||
|
"name": "TransferFilter",
|
||||||
|
"fromBlock": "0x488290",
|
||||||
|
"toBlock": "0x488678",
|
||||||
|
"address": "0x06012c8cf97bead5deae237070f9587f8e7a266d",
|
||||||
|
"topics": ["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "NewFilter",
|
||||||
|
"toBlock": "0x4B34AA",
|
||||||
|
"fromBlock": "0x4B34AD",
|
||||||
|
"address": "0x06012c8cf97bead5deae237070f9587f8e7a266d",
|
||||||
|
"topics": ["0x241ea03ca20251805084d27d4440371c34a0b85ff108f6bb5611248f73818b80"]
|
||||||
|
}]
|
||||||
|
|
@ -15,8 +15,7 @@ import (
|
|||||||
|
|
||||||
var _ = Describe("Reading contracts", func() {
|
var _ = Describe("Reading contracts", func() {
|
||||||
|
|
||||||
//TODO was experiencing Infura issue (I suspect) on 1/5. Unignore these and revisit if persists on next commit
|
Describe("Reading the list of attributes", func() {
|
||||||
XDescribe("Reading the list of attributes", func() {
|
|
||||||
It("returns a string attribute for a real contract", func() {
|
It("returns a string attribute for a real contract", func() {
|
||||||
config, err := cfg.NewConfig("infura")
|
config, err := cfg.NewConfig("infura")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -59,8 +58,7 @@ var _ = Describe("Reading contracts", func() {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
//TODO was experiencing Infura issue (I suspect) on 1/5. Unignore these and revisit if persists on next commit
|
Describe("Getting a contract attribute", func() {
|
||||||
XDescribe("Getting a contract attribute", func() {
|
|
||||||
It("returns the correct attribute for a real contract", func() {
|
It("returns the correct attribute for a real contract", func() {
|
||||||
config, _ := cfg.NewConfig("infura")
|
config, _ := cfg.NewConfig("infura")
|
||||||
blockchain := geth.NewBlockchain(config.Client.IPCPath)
|
blockchain := geth.NewBlockchain(config.Client.IPCPath)
|
||||||
@ -109,8 +107,8 @@ var _ = Describe("Reading contracts", func() {
|
|||||||
expectedLogZero := core.Log{
|
expectedLogZero := core.Log{
|
||||||
BlockNumber: 4703824,
|
BlockNumber: 4703824,
|
||||||
TxHash: "0xf896bfd1eb539d881a1a31102b78de9f25cd591bf1fe1924b86148c0b205fd5d",
|
TxHash: "0xf896bfd1eb539d881a1a31102b78de9f25cd591bf1fe1924b86148c0b205fd5d",
|
||||||
Address: "0xd26114cd6EE289AccF82350c8d8487fedB8A0C07",
|
Address: "0xd26114cd6ee289accf82350c8d8487fedb8a0c07",
|
||||||
Topics: map[int]string{
|
Topics: core.Topics{
|
||||||
0: "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",
|
0: "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",
|
||||||
1: "0x000000000000000000000000fbb1b73c4f0bda4f67dca266ce6ef42f520fbb98",
|
1: "0x000000000000000000000000fbb1b73c4f0bda4f67dca266ce6ef42f520fbb98",
|
||||||
2: "0x000000000000000000000000d26114cd6ee289accf82350c8d8487fedb8a0c07",
|
2: "0x000000000000000000000000d26114cd6ee289accf82350c8d8487fedb8a0c07",
|
||||||
|
@ -52,6 +52,8 @@ var _ = Describe("Reading from the Geth blockchain", func() {
|
|||||||
|
|
||||||
Expect(node.GenesisBlock).To(Equal(devNetworkGenesisBlock))
|
Expect(node.GenesisBlock).To(Equal(devNetworkGenesisBlock))
|
||||||
Expect(node.NetworkId).To(Equal(devNetworkNodeId))
|
Expect(node.NetworkId).To(Equal(devNetworkNodeId))
|
||||||
|
Expect(len(node.Id)).To(Equal(128))
|
||||||
|
Expect(node.ClientName).To(ContainSubstring("Geth"))
|
||||||
|
|
||||||
close(done)
|
close(done)
|
||||||
}, 15)
|
}, 15)
|
||||||
|
9
main.go
Normal file
9
main.go
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/vulcanize/vulcanizedb/cmd"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
cmd.Execute()
|
||||||
|
}
|
@ -4,7 +4,7 @@ type Log struct {
|
|||||||
BlockNumber int64
|
BlockNumber int64
|
||||||
TxHash string
|
TxHash string
|
||||||
Address string
|
Address string
|
||||||
Topics map[int]string
|
Topics
|
||||||
Index int64
|
Index int64
|
||||||
Data string
|
Data string
|
||||||
}
|
}
|
||||||
|
@ -3,4 +3,6 @@ package core
|
|||||||
type Node struct {
|
type Node struct {
|
||||||
GenesisBlock string
|
GenesisBlock string
|
||||||
NetworkId float64
|
NetworkId float64
|
||||||
|
Id string
|
||||||
|
ClientName string
|
||||||
}
|
}
|
||||||
|
3
pkg/core/topics.go
Normal file
3
pkg/core/topics.go
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
package core
|
||||||
|
|
||||||
|
type Topics [4]string
|
@ -9,5 +9,5 @@ type Transaction struct {
|
|||||||
GasLimit int64
|
GasLimit int64
|
||||||
GasPrice int64
|
GasPrice int64
|
||||||
Receipt
|
Receipt
|
||||||
Value int64
|
Value string
|
||||||
}
|
}
|
||||||
|
@ -50,7 +50,7 @@ func NewBlockchain() *Blockchain {
|
|||||||
blocks: make(map[int64]core.Block),
|
blocks: make(map[int64]core.Block),
|
||||||
logs: make(map[string][]core.Log),
|
logs: make(map[string][]core.Log),
|
||||||
contractAttributes: make(map[string]map[string]string),
|
contractAttributes: make(map[string]map[string]string),
|
||||||
node: core.Node{GenesisBlock: "GENESIS"},
|
node: core.Node{GenesisBlock: "GENESIS", NetworkId: 1, Id: "x123", ClientName: "Geth"},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
64
pkg/filters/filter_query.go
Normal file
64
pkg/filters/filter_query.go
Normal file
@ -0,0 +1,64 @@
|
|||||||
|
package filters
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
|
||||||
|
"errors"
|
||||||
|
|
||||||
|
"github.com/vulcanize/vulcanizedb/pkg/core"
|
||||||
|
"github.com/ethereum/go-ethereum/common"
|
||||||
|
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||||
|
)
|
||||||
|
|
||||||
|
type LogFilters []LogFilter
|
||||||
|
|
||||||
|
type LogFilter struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
FromBlock int64 `json:"fromBlock"`
|
||||||
|
ToBlock int64 `json:"toBlock"`
|
||||||
|
Address string `json:"address"`
|
||||||
|
core.Topics `json:"topics"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (filterQuery *LogFilter) UnmarshalJSON(input []byte) error {
|
||||||
|
type Alias LogFilter
|
||||||
|
|
||||||
|
var err error
|
||||||
|
aux := &struct {
|
||||||
|
ToBlock string `json:"toBlock"`
|
||||||
|
FromBlock string `json:"fromBlock"`
|
||||||
|
*Alias
|
||||||
|
}{
|
||||||
|
Alias: (*Alias)(filterQuery),
|
||||||
|
}
|
||||||
|
if err := json.Unmarshal(input, &aux); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if filterQuery.Name == "" {
|
||||||
|
return errors.New("filters: must provide name for logfilter")
|
||||||
|
}
|
||||||
|
filterQuery.ToBlock, err = filterQuery.unmarshalFromToBlock(aux.ToBlock)
|
||||||
|
if err != nil {
|
||||||
|
return errors.New("filters: invalid fromBlock")
|
||||||
|
}
|
||||||
|
filterQuery.FromBlock, err = filterQuery.unmarshalFromToBlock(aux.FromBlock)
|
||||||
|
if err != nil {
|
||||||
|
return errors.New("filters: invalid fromBlock")
|
||||||
|
}
|
||||||
|
if !common.IsHexAddress(filterQuery.Address) {
|
||||||
|
return errors.New("filters: invalid address")
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (filterQuery *LogFilter) unmarshalFromToBlock(auxBlock string) (int64, error) {
|
||||||
|
if auxBlock == "" {
|
||||||
|
return -1, nil
|
||||||
|
}
|
||||||
|
block, err := hexutil.DecodeUint64(auxBlock)
|
||||||
|
if err != nil {
|
||||||
|
return 0, errors.New("filters: invalid block arg")
|
||||||
|
}
|
||||||
|
return int64(block), nil
|
||||||
|
}
|
125
pkg/filters/filter_test.go
Normal file
125
pkg/filters/filter_test.go
Normal file
@ -0,0 +1,125 @@
|
|||||||
|
package filters_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
|
||||||
|
"github.com/vulcanize/vulcanizedb/pkg/core"
|
||||||
|
"github.com/vulcanize/vulcanizedb/pkg/filters"
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("Log filters", func() {
|
||||||
|
It("decodes web3 filter to LogFilter", func() {
|
||||||
|
|
||||||
|
var logFilter filters.LogFilter
|
||||||
|
jsonFilter := []byte(
|
||||||
|
`{
|
||||||
|
"name": "TestEvent",
|
||||||
|
"fromBlock": "0x1",
|
||||||
|
"toBlock": "0x488290",
|
||||||
|
"address": "0x8888f1f195afa192cfee860698584c030f4c9db1",
|
||||||
|
"topics": ["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null, "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null]
|
||||||
|
}`)
|
||||||
|
err := json.Unmarshal(jsonFilter, &logFilter)
|
||||||
|
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
Expect(logFilter.Name).To(Equal("TestEvent"))
|
||||||
|
Expect(logFilter.FromBlock).To(Equal(int64(1)))
|
||||||
|
Expect(logFilter.ToBlock).To(Equal(int64(4752016)))
|
||||||
|
Expect(logFilter.Address).To(Equal("0x8888f1f195afa192cfee860698584c030f4c9db1"))
|
||||||
|
Expect(logFilter.Topics).To(Equal(
|
||||||
|
core.Topics{
|
||||||
|
"0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b",
|
||||||
|
"",
|
||||||
|
"0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b",
|
||||||
|
""}))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("decodes array of web3 filters to []LogFilter", func() {
|
||||||
|
|
||||||
|
logFilters := make([]filters.LogFilter, 0)
|
||||||
|
jsonFilter := []byte(
|
||||||
|
`[{
|
||||||
|
"name": "TestEvent",
|
||||||
|
"fromBlock": "0x1",
|
||||||
|
"toBlock": "0x488290",
|
||||||
|
"address": "0x8888f1f195afa192cfee860698584c030f4c9db1",
|
||||||
|
"topics": ["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null, "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "TestEvent2",
|
||||||
|
"fromBlock": "0x3",
|
||||||
|
"toBlock": "0x4",
|
||||||
|
"address": "0xd26114cd6EE289AccF82350c8d8487fedB8A0C07",
|
||||||
|
"topics": ["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef", "0x0000000000000000000000006b0949d4c6edfe467db78241b7d5566f3c2bb43e", "0x0000000000000000000000005e44c3e467a49c9ca0296a9f130fc433041aaa28"]
|
||||||
|
}]`)
|
||||||
|
err := json.Unmarshal(jsonFilter, &logFilters)
|
||||||
|
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
Expect(len(logFilters)).To(Equal(2))
|
||||||
|
Expect(logFilters[0].Name).To(Equal("TestEvent"))
|
||||||
|
Expect(logFilters[1].Name).To(Equal("TestEvent2"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("requires valid ethereum address", func() {
|
||||||
|
|
||||||
|
var logFilter filters.LogFilter
|
||||||
|
jsonFilter := []byte(
|
||||||
|
`{
|
||||||
|
"name": "TestEvent",
|
||||||
|
"fromBlock": "0x1",
|
||||||
|
"toBlock": "0x2",
|
||||||
|
"address": "0x8888f1f195afa192cf84c030f4c9db1",
|
||||||
|
"topics": ["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null, "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null]
|
||||||
|
}`)
|
||||||
|
err := json.Unmarshal(jsonFilter, &logFilter)
|
||||||
|
Expect(err).To(HaveOccurred())
|
||||||
|
|
||||||
|
})
|
||||||
|
It("requires name", func() {
|
||||||
|
|
||||||
|
var logFilter filters.LogFilter
|
||||||
|
jsonFilter := []byte(
|
||||||
|
`{
|
||||||
|
"fromBlock": "0x1",
|
||||||
|
"toBlock": "0x2",
|
||||||
|
"address": "0x8888f1f195afa192cfee860698584c030f4c9db1",
|
||||||
|
"topics": ["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null, "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null]
|
||||||
|
}`)
|
||||||
|
err := json.Unmarshal(jsonFilter, &logFilter)
|
||||||
|
Expect(err).To(HaveOccurred())
|
||||||
|
|
||||||
|
})
|
||||||
|
|
||||||
|
It("maps missing fromBlock to -1", func() {
|
||||||
|
|
||||||
|
var logFilter filters.LogFilter
|
||||||
|
jsonFilter := []byte(
|
||||||
|
`{
|
||||||
|
"name": "TestEvent",
|
||||||
|
"toBlock": "0x2",
|
||||||
|
"address": "0x8888f1f195afa192cfee860698584c030f4c9db1",
|
||||||
|
"topics": ["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null, "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null]
|
||||||
|
}`)
|
||||||
|
err := json.Unmarshal(jsonFilter, &logFilter)
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
Expect(logFilter.FromBlock).To(Equal(int64(-1)))
|
||||||
|
|
||||||
|
})
|
||||||
|
|
||||||
|
It("maps missing toBlock to -1", func() {
|
||||||
|
var logFilter filters.LogFilter
|
||||||
|
jsonFilter := []byte(
|
||||||
|
`{
|
||||||
|
"name": "TestEvent",
|
||||||
|
"address": "0x8888f1f195afa192cfee860698584c030f4c9db1",
|
||||||
|
"topics": ["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null, "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null]
|
||||||
|
}`)
|
||||||
|
err := json.Unmarshal(jsonFilter, &logFilter)
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
Expect(logFilter.ToBlock).To(Equal(int64(-1)))
|
||||||
|
|
||||||
|
})
|
||||||
|
|
||||||
|
})
|
13
pkg/filters/query_builder_suite_test.go
Normal file
13
pkg/filters/query_builder_suite_test.go
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
package filters_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestQueryBuilder(t *testing.T) {
|
||||||
|
RegisterFailHandler(Fail)
|
||||||
|
RunSpecs(t, "QueryBuilder Suite")
|
||||||
|
}
|
@ -38,6 +38,19 @@ func NewEtherScanClient(url string) *EtherScanApi {
|
|||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func GenUrl(network string) string {
|
||||||
|
switch network {
|
||||||
|
case "ropsten":
|
||||||
|
return "https://ropsten.etherscan.io"
|
||||||
|
case "kovan":
|
||||||
|
return "https://kovan.etherscan.io"
|
||||||
|
case "rinkeby":
|
||||||
|
return "https://rinkeby.etherscan.io"
|
||||||
|
default:
|
||||||
|
return "https://api.etherscan.io"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
//https://api.etherscan.io/api?module=contract&action=getabi&address=%s
|
//https://api.etherscan.io/api?module=contract&action=getabi&address=%s
|
||||||
func (e *EtherScanApi) GetAbi(contractHash string) (string, error) {
|
func (e *EtherScanApi) GetAbi(contractHash string) (string, error) {
|
||||||
target := new(Response)
|
target := new(Response)
|
||||||
|
@ -101,5 +101,22 @@ var _ = Describe("ABI files", func() {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
Describe("Generating etherscan endpoints based on network", func() {
|
||||||
|
It("should return the main endpoint as the default", func() {
|
||||||
|
url := geth.GenUrl("")
|
||||||
|
Expect(url).To(Equal("https://api.etherscan.io"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("generates various test network endpoint if test network is supplied", func() {
|
||||||
|
ropstenUrl := geth.GenUrl("ropsten")
|
||||||
|
rinkebyUrl := geth.GenUrl("rinkeby")
|
||||||
|
kovanUrl := geth.GenUrl("kovan")
|
||||||
|
|
||||||
|
Expect(ropstenUrl).To(Equal("https://ropsten.etherscan.io"))
|
||||||
|
Expect(kovanUrl).To(Equal("https://kovan.etherscan.io"))
|
||||||
|
Expect(rinkebyUrl).To(Equal("https://rinkeby.etherscan.io"))
|
||||||
|
})
|
||||||
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
@ -25,7 +25,7 @@ func ToCoreBlock(gethBlock *types.Block, client GethClient) core.Block {
|
|||||||
GasLimit: gethBlock.GasLimit().Int64(),
|
GasLimit: gethBlock.GasLimit().Int64(),
|
||||||
GasUsed: gethBlock.GasUsed().Int64(),
|
GasUsed: gethBlock.GasUsed().Int64(),
|
||||||
Hash: gethBlock.Hash().Hex(),
|
Hash: gethBlock.Hash().Hex(),
|
||||||
Miner: gethBlock.Coinbase().Hex(),
|
Miner: strings.ToLower(gethBlock.Coinbase().Hex()),
|
||||||
Nonce: hexutil.Encode(gethBlock.Header().Nonce[:]),
|
Nonce: hexutil.Encode(gethBlock.Header().Nonce[:]),
|
||||||
Number: gethBlock.Number().Int64(),
|
Number: gethBlock.Number().Int64(),
|
||||||
ParentHash: gethBlock.ParentHash().Hex(),
|
ParentHash: gethBlock.ParentHash().Hex(),
|
||||||
@ -58,6 +58,10 @@ func convertTransactionsToCore(gethBlock *types.Block, client GethClient) []core
|
|||||||
|
|
||||||
func appendReceiptToTransaction(client GethClient, transaction core.Transaction) (core.Transaction, error) {
|
func appendReceiptToTransaction(client GethClient, transaction core.Transaction) (core.Transaction, error) {
|
||||||
gethReceipt, err := client.TransactionReceipt(context.Background(), common.HexToHash(transaction.Hash))
|
gethReceipt, err := client.TransactionReceipt(context.Background(), common.HexToHash(transaction.Hash))
|
||||||
|
if err != nil {
|
||||||
|
log.Println(err)
|
||||||
|
return transaction, err
|
||||||
|
}
|
||||||
receipt := ReceiptToCoreReceipt(gethReceipt)
|
receipt := ReceiptToCoreReceipt(gethReceipt)
|
||||||
transaction.Receipt = receipt
|
transaction.Receipt = receipt
|
||||||
return transaction, err
|
return transaction, err
|
||||||
@ -72,7 +76,7 @@ func transToCoreTrans(transaction *types.Transaction, from *common.Address) core
|
|||||||
From: strings.ToLower(addressToHex(from)),
|
From: strings.ToLower(addressToHex(from)),
|
||||||
GasLimit: transaction.Gas().Int64(),
|
GasLimit: transaction.Gas().Int64(),
|
||||||
GasPrice: transaction.GasPrice().Int64(),
|
GasPrice: transaction.GasPrice().Int64(),
|
||||||
Value: transaction.Value().Int64(),
|
Value: transaction.Value().String(),
|
||||||
Data: data,
|
Data: data,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -246,7 +246,7 @@ var _ = Describe("Conversion of GethBlock to core.Block", func() {
|
|||||||
Expect(coreTransaction.From).To(Equal("0x0000000000000000000000000000000000000123"))
|
Expect(coreTransaction.From).To(Equal("0x0000000000000000000000000000000000000123"))
|
||||||
Expect(coreTransaction.GasLimit).To(Equal(gethTransaction.Gas().Int64()))
|
Expect(coreTransaction.GasLimit).To(Equal(gethTransaction.Gas().Int64()))
|
||||||
Expect(coreTransaction.GasPrice).To(Equal(gethTransaction.GasPrice().Int64()))
|
Expect(coreTransaction.GasPrice).To(Equal(gethTransaction.GasPrice().Int64()))
|
||||||
Expect(coreTransaction.Value).To(Equal(gethTransaction.Value().Int64()))
|
Expect(coreTransaction.Value).To(Equal(gethTransaction.Value().String()))
|
||||||
Expect(coreTransaction.Nonce).To(Equal(gethTransaction.Nonce()))
|
Expect(coreTransaction.Nonce).To(Equal(gethTransaction.Nonce()))
|
||||||
|
|
||||||
coreReceipt := coreTransaction.Receipt
|
coreReceipt := coreTransaction.Receipt
|
||||||
|
@ -3,6 +3,10 @@ package geth
|
|||||||
import (
|
import (
|
||||||
"math/big"
|
"math/big"
|
||||||
|
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"log"
|
||||||
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/core"
|
"github.com/vulcanize/vulcanizedb/pkg/core"
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/geth/node"
|
"github.com/vulcanize/vulcanizedb/pkg/geth/node"
|
||||||
"github.com/ethereum/go-ethereum"
|
"github.com/ethereum/go-ethereum"
|
||||||
@ -23,13 +27,27 @@ type Blockchain struct {
|
|||||||
|
|
||||||
func NewBlockchain(ipcPath string) *Blockchain {
|
func NewBlockchain(ipcPath string) *Blockchain {
|
||||||
blockchain := Blockchain{}
|
blockchain := Blockchain{}
|
||||||
rpcClient, _ := rpc.Dial(ipcPath)
|
rpcClient, err := rpc.Dial(ipcPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
client := ethclient.NewClient(rpcClient)
|
client := ethclient.NewClient(rpcClient)
|
||||||
blockchain.node = node.Retrieve(rpcClient)
|
blockchain.node = node.Info(rpcClient)
|
||||||
|
if infura := isInfuraNode(ipcPath); infura {
|
||||||
|
blockchain.node.Id = "infura"
|
||||||
|
blockchain.node.ClientName = "infura"
|
||||||
|
}
|
||||||
blockchain.client = client
|
blockchain.client = client
|
||||||
return &blockchain
|
return &blockchain
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func isInfuraNode(ipcPath string) bool {
|
||||||
|
if strings.Contains(ipcPath, "infura") {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
func (blockchain *Blockchain) GetLogs(contract core.Contract, startingBlockNumber *big.Int, endingBlockNumber *big.Int) ([]core.Log, error) {
|
func (blockchain *Blockchain) GetLogs(contract core.Contract, startingBlockNumber *big.Int, endingBlockNumber *big.Int) ([]core.Log, error) {
|
||||||
if endingBlockNumber == nil {
|
if endingBlockNumber == nil {
|
||||||
endingBlockNumber = startingBlockNumber
|
endingBlockNumber = startingBlockNumber
|
||||||
@ -44,7 +62,7 @@ func (blockchain *Blockchain) GetLogs(contract core.Contract, startingBlockNumbe
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return []core.Log{}, err
|
return []core.Log{}, err
|
||||||
}
|
}
|
||||||
logs := GethLogsToCoreLogs(gethLogs)
|
logs := ToCoreLogs(gethLogs)
|
||||||
return logs, nil
|
return logs, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,19 +1,36 @@
|
|||||||
package geth
|
package geth
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"strings"
|
||||||
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/core"
|
"github.com/vulcanize/vulcanizedb/pkg/core"
|
||||||
|
"github.com/ethereum/go-ethereum/common"
|
||||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||||
"github.com/ethereum/go-ethereum/core/types"
|
"github.com/ethereum/go-ethereum/core/types"
|
||||||
)
|
)
|
||||||
|
|
||||||
func LogToCoreLog(gethLog types.Log) core.Log {
|
func ToCoreLogs(gethLogs []types.Log) []core.Log {
|
||||||
topics := gethLog.Topics
|
var logs []core.Log
|
||||||
var hexTopics = make(map[int]string)
|
for _, log := range gethLogs {
|
||||||
|
log := ToCoreLog(log)
|
||||||
|
logs = append(logs, log)
|
||||||
|
}
|
||||||
|
return logs
|
||||||
|
}
|
||||||
|
|
||||||
|
func makeTopics(topics []common.Hash) core.Topics {
|
||||||
|
var hexTopics core.Topics
|
||||||
for i, topic := range topics {
|
for i, topic := range topics {
|
||||||
hexTopics[i] = topic.Hex()
|
hexTopics[i] = topic.Hex()
|
||||||
}
|
}
|
||||||
|
return hexTopics
|
||||||
|
}
|
||||||
|
|
||||||
|
func ToCoreLog(gethLog types.Log) core.Log {
|
||||||
|
topics := gethLog.Topics
|
||||||
|
hexTopics := makeTopics(topics)
|
||||||
return core.Log{
|
return core.Log{
|
||||||
Address: gethLog.Address.Hex(),
|
Address: strings.ToLower(gethLog.Address.Hex()),
|
||||||
|
|
||||||
BlockNumber: int64(gethLog.BlockNumber),
|
BlockNumber: int64(gethLog.BlockNumber),
|
||||||
Topics: hexTopics,
|
Topics: hexTopics,
|
||||||
@ -22,12 +39,3 @@ func LogToCoreLog(gethLog types.Log) core.Log {
|
|||||||
Data: hexutil.Encode(gethLog.Data),
|
Data: hexutil.Encode(gethLog.Data),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func GethLogsToCoreLogs(gethLogs []types.Log) []core.Log {
|
|
||||||
var logs []core.Log
|
|
||||||
for _, log := range gethLogs {
|
|
||||||
log := LogToCoreLog(log)
|
|
||||||
logs = append(logs, log)
|
|
||||||
}
|
|
||||||
return logs
|
|
||||||
}
|
|
||||||
|
@ -1,6 +1,8 @@
|
|||||||
package geth_test
|
package geth_test
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"strings"
|
||||||
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/core"
|
"github.com/vulcanize/vulcanizedb/pkg/core"
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/geth"
|
"github.com/vulcanize/vulcanizedb/pkg/geth"
|
||||||
"github.com/ethereum/go-ethereum/common"
|
"github.com/ethereum/go-ethereum/common"
|
||||||
@ -14,7 +16,7 @@ var _ = Describe("Conversion of GethLog to core.Log", func() {
|
|||||||
|
|
||||||
It("converts geth log to internal log format", func() {
|
It("converts geth log to internal log format", func() {
|
||||||
gethLog := types.Log{
|
gethLog := types.Log{
|
||||||
Address: common.HexToAddress("0xecf8f87f810ecf450940c9f60066b4a7a501d6a7"),
|
Address: common.HexToAddress("0x448a5065aeBB8E423F0896E6c5D525C040f59af3"),
|
||||||
BlockHash: common.HexToHash("0x656c34545f90a730a19008c0e7a7cd4fb3895064b48d6d69761bd5abad681056"),
|
BlockHash: common.HexToHash("0x656c34545f90a730a19008c0e7a7cd4fb3895064b48d6d69761bd5abad681056"),
|
||||||
BlockNumber: 2019236,
|
BlockNumber: 2019236,
|
||||||
Data: hexutil.MustDecode("0x000000000000000000000000000000000000000000000001a055690d9db80000"),
|
Data: hexutil.MustDecode("0x000000000000000000000000000000000000000000000001a055690d9db80000"),
|
||||||
@ -28,18 +30,18 @@ var _ = Describe("Conversion of GethLog to core.Log", func() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
expected := core.Log{
|
expected := core.Log{
|
||||||
Address: gethLog.Address.Hex(),
|
Address: strings.ToLower(gethLog.Address.Hex()),
|
||||||
BlockNumber: int64(gethLog.BlockNumber),
|
BlockNumber: int64(gethLog.BlockNumber),
|
||||||
Data: hexutil.Encode(gethLog.Data),
|
Data: hexutil.Encode(gethLog.Data),
|
||||||
TxHash: gethLog.TxHash.Hex(),
|
TxHash: gethLog.TxHash.Hex(),
|
||||||
Index: 2,
|
Index: 2,
|
||||||
Topics: map[int]string{
|
Topics: core.Topics{
|
||||||
0: common.HexToHash("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef").Hex(),
|
gethLog.Topics[0].Hex(),
|
||||||
1: common.HexToHash("0x00000000000000000000000080b2c9d7cbbf30a1b0fc8983c647d754c6525615").Hex(),
|
gethLog.Topics[1].Hex(),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
coreLog := geth.LogToCoreLog(gethLog)
|
coreLog := geth.ToCoreLog(gethLog)
|
||||||
|
|
||||||
Expect(coreLog.Address).To(Equal(expected.Address))
|
Expect(coreLog.Address).To(Equal(expected.Address))
|
||||||
Expect(coreLog.BlockNumber).To(Equal(expected.BlockNumber))
|
Expect(coreLog.BlockNumber).To(Equal(expected.BlockNumber))
|
||||||
@ -79,10 +81,10 @@ var _ = Describe("Conversion of GethLog to core.Log", func() {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
expectedOne := geth.LogToCoreLog(gethLogOne)
|
expectedOne := geth.ToCoreLog(gethLogOne)
|
||||||
expectedTwo := geth.LogToCoreLog(gethLogTwo)
|
expectedTwo := geth.ToCoreLog(gethLogTwo)
|
||||||
|
|
||||||
coreLogs := geth.GethLogsToCoreLogs([]types.Log{gethLogOne, gethLogTwo})
|
coreLogs := geth.ToCoreLogs([]types.Log{gethLogOne, gethLogTwo})
|
||||||
|
|
||||||
Expect(len(coreLogs)).To(Equal(2))
|
Expect(len(coreLogs)).To(Equal(2))
|
||||||
Expect(coreLogs[0]).To(Equal(expectedOne))
|
Expect(coreLogs[0]).To(Equal(expectedOne))
|
||||||
|
@ -3,30 +3,47 @@ package node
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
|
||||||
|
"strconv"
|
||||||
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/core"
|
"github.com/vulcanize/vulcanizedb/pkg/core"
|
||||||
|
"github.com/ethereum/go-ethereum/core/types"
|
||||||
"github.com/ethereum/go-ethereum/p2p"
|
"github.com/ethereum/go-ethereum/p2p"
|
||||||
"github.com/ethereum/go-ethereum/rpc"
|
"github.com/ethereum/go-ethereum/rpc"
|
||||||
)
|
)
|
||||||
|
|
||||||
func Retrieve(client *rpc.Client) core.Node {
|
func Info(client *rpc.Client) core.Node {
|
||||||
var info p2p.NodeInfo
|
|
||||||
node := core.Node{}
|
node := core.Node{}
|
||||||
client.CallContext(context.Background(), &info, "admin_nodeInfo")
|
node.NetworkId = NetworkId(client)
|
||||||
for protocolName, protocol := range info.Protocols {
|
node.GenesisBlock = GenesisBlock(client)
|
||||||
if protocolName == "eth" {
|
node.Id, node.ClientName = IdClientName(client)
|
||||||
protocolMap, _ := protocol.(map[string]interface{})
|
|
||||||
node.GenesisBlock = getAttribute(protocolMap, "genesis").(string)
|
|
||||||
node.NetworkId = getAttribute(protocolMap, "network").(float64)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return node
|
return node
|
||||||
}
|
}
|
||||||
|
|
||||||
func getAttribute(protocolMap map[string]interface{}, protocol string) interface{} {
|
func IdClientName(client *rpc.Client) (string, string) {
|
||||||
for key, val := range protocolMap {
|
var info p2p.NodeInfo
|
||||||
if key == protocol {
|
modules, _ := client.SupportedModules()
|
||||||
return val
|
if _, ok := modules["admin"]; ok {
|
||||||
}
|
client.CallContext(context.Background(), &info, "admin_nodeInfo")
|
||||||
|
return info.ID, info.Name
|
||||||
}
|
}
|
||||||
return nil
|
return "", ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func NetworkId(client *rpc.Client) float64 {
|
||||||
|
var version string
|
||||||
|
client.CallContext(context.Background(), &version, "net_version")
|
||||||
|
networkId, _ := strconv.ParseFloat(version, 64)
|
||||||
|
return networkId
|
||||||
|
}
|
||||||
|
|
||||||
|
func ProtocolVersion(client *rpc.Client) string {
|
||||||
|
var protocolVersion string
|
||||||
|
client.CallContext(context.Background(), &protocolVersion, "eth_protocolVersion")
|
||||||
|
return protocolVersion
|
||||||
|
}
|
||||||
|
|
||||||
|
func GenesisBlock(client *rpc.Client) string {
|
||||||
|
var header *types.Header
|
||||||
|
client.CallContext(context.Background(), &header, "eth_getBlockByNumber", "0x0", false)
|
||||||
|
return header.Hash().Hex()
|
||||||
}
|
}
|
||||||
|
@ -49,7 +49,7 @@ func setContractAddress(gethReceipt *types.Receipt) string {
|
|||||||
func dereferenceLogs(gethReceipt *types.Receipt) []core.Log {
|
func dereferenceLogs(gethReceipt *types.Receipt) []core.Log {
|
||||||
logs := []core.Log{}
|
logs := []core.Log{}
|
||||||
for _, log := range gethReceipt.Logs {
|
for _, log := range gethReceipt.Logs {
|
||||||
logs = append(logs, LogToCoreLog(*log))
|
logs = append(logs, ToCoreLog(*log))
|
||||||
}
|
}
|
||||||
return logs
|
return logs
|
||||||
}
|
}
|
||||||
|
@ -1 +1 @@
|
|||||||
[{"constant":true,"inputs":[],"name":"mintingFinished","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"name","outputs":[{"name":"","type":"string"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_spender","type":"address"},{"name":"_value","type":"uint256"}],"name":"approve","outputs":[],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_from","type":"address"},{"name":"_to","type":"address"},{"name":"_value","type":"uint256"}],"name":"transferFrom","outputs":[],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint256"}],"payable":false,"type":"function"},{"constant":false,"inputs":[],"name":"unpause","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_to","type":"address"},{"name":"_amount","type":"uint256"}],"name":"mint","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"paused","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"_owner","type":"address"}],"name":"balanceOf","outputs":[{"name":"balance","type":"uint256"}],"payable":false,"type":"function"},{"constant":false,"inputs":[],"name":"finishMinting","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":false,"inputs":[],"name":"pause","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"owner","outputs":[{"name":"","type":"address"}],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"string"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_to","type":"address"},{"name":"_value","type":"uint256"}],"name":"transfer","outputs":[],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_to","type":"address"},{"name":"_amount","type":"uint256"},{"name":"_releaseTime","type":"uint256"}],"name":"mintTimelocked","outputs":[{"name":"","type":"address"}],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"_owner","type":"address"},{"name":"_spender","type":"address"}],"name":"allowance","outputs":[{"name":"remaining","type":"uint256"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"newOwner","type":"address"}],"name":"transferOwnership","outputs":[],"payable":false,"type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"to","type":"address"},{"indexed":false,"name":"value","type":"uint256"}],"name":"Mint","type":"event"},{"anonymous":false,"inputs":[],"name":"MintFinished","type":"event"},{"anonymous":false,"inputs":[],"name":"Pause","type":"event"},{"anonymous":false,"inputs":[],"name":"Unpause","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"owner","type":"address"},{"indexed":true,"name":"spender","type":"address"},{"indexed":false,"name":"value","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"from","type":"address"},{"indexed":true,"name":"to","type":"address"},{"indexed":false,"name":"value","type":"uint256"}],"name":"Transfer","type":"event"}]
|
[{"constant":true,"inputs":[],"name":"mintingFinished","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"name","outputs":[{"name":"","type":"string"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_spender","type":"address"},{"name":"_value","type":"uint256"}],"name":"approve","outputs":[],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_from","type":"address"},{"name":"_to","type":"address"},{"name":"_value","type":"uint256"}],"name":"transferFrom","outputs":[],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint256"}],"payable":false,"type":"function"},{"constant":false,"inputs":[],"name":"unpause","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_to","type":"address"},{"name":"_amount","type":"uint256"}],"name":"mint","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"paused","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"_owner","type":"address"}],"name":"balanceOf","outputs":[{"name":"balance","type":"uint256"}],"payable":false,"type":"function"},{"constant":false,"inputs":[],"name":"finishMinting","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":false,"inputs":[],"name":"pause","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"owner","outputs":[{"name":"","type":"address"}],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"string"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_to","type":"address"},{"name":"_value","type":"uint256"}],"name":"transfer","outputs":[],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_to","type":"address"},{"name":"_amount","type":"uint256"},{"name":"_releaseTime","type":"uint256"}],"name":"mintTimelocked","outputs":[{"name":"","type":"address"}],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"_owner","type":"address"},{"name":"_spender","type":"address"}],"name":"allowance","outputs":[{"name":"remaining","type":"uint256"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"newOwner","type":"address"}],"name":"transferOwnership","outputs":[],"payable":false,"type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"to","type":"address"},{"indexed":false,"name":"value","type":"uint256"}],"name":"Mint","type":"event"},{"anonymous":false,"inputs":[],"name":"MintFinished","type":"event"},{"anonymous":false,"inputs":[],"name":"Pause","type":"event"},{"anonymous":false,"inputs":[],"name":"Unpause","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"owner","type":"address"},{"indexed":true,"name":"spender","type":"address"},{"indexed":false,"name":"value","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"from","type":"address"},{"indexed":true,"name":"to","type":"address"},{"indexed":false,"name":"value","type":"uint256"}],"name":"Transfer","type":"event"}]
|
@ -3,7 +3,10 @@ package repositories
|
|||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
|
||||||
|
"errors"
|
||||||
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/core"
|
"github.com/vulcanize/vulcanizedb/pkg/core"
|
||||||
|
"github.com/vulcanize/vulcanizedb/pkg/filters"
|
||||||
)
|
)
|
||||||
|
|
||||||
type InMemory struct {
|
type InMemory struct {
|
||||||
@ -11,16 +14,27 @@ type InMemory struct {
|
|||||||
receipts map[string]core.Receipt
|
receipts map[string]core.Receipt
|
||||||
contracts map[string]core.Contract
|
contracts map[string]core.Contract
|
||||||
logs map[string][]core.Log
|
logs map[string][]core.Log
|
||||||
|
logFilters map[string]filters.LogFilter
|
||||||
CreateOrUpdateBlockCallCount int
|
CreateOrUpdateBlockCallCount int
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (repository *InMemory) AddFilter(filter filters.LogFilter) error {
|
||||||
|
key := filter.Name
|
||||||
|
if _, ok := repository.logFilters[key]; ok || key == "" {
|
||||||
|
return errors.New("filter name not unique")
|
||||||
|
}
|
||||||
|
repository.logFilters[key] = filter
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
func NewInMemory() *InMemory {
|
func NewInMemory() *InMemory {
|
||||||
return &InMemory{
|
return &InMemory{
|
||||||
CreateOrUpdateBlockCallCount: 0,
|
CreateOrUpdateBlockCallCount: 0,
|
||||||
blocks: make(map[int64]core.Block),
|
blocks: make(map[int64]core.Block),
|
||||||
receipts: make(map[string]core.Receipt),
|
receipts: make(map[string]core.Receipt),
|
||||||
contracts: make(map[string]core.Contract),
|
contracts: make(map[string]core.Contract),
|
||||||
logs: make(map[string][]core.Log),
|
logs: make(map[string][]core.Log),
|
||||||
|
logFilters: make(map[string]filters.LogFilter),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -102,6 +116,7 @@ func (repository *InMemory) CreateOrUpdateBlock(block core.Block) error {
|
|||||||
repository.blocks[block.Number] = block
|
repository.blocks[block.Number] = block
|
||||||
for _, transaction := range block.Transactions {
|
for _, transaction := range block.Transactions {
|
||||||
repository.receipts[transaction.Hash] = transaction.Receipt
|
repository.receipts[transaction.Hash] = transaction.Receipt
|
||||||
|
repository.logs[transaction.TxHash] = transaction.Logs
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
@ -11,6 +11,7 @@ import (
|
|||||||
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/config"
|
"github.com/vulcanize/vulcanizedb/pkg/config"
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/core"
|
"github.com/vulcanize/vulcanizedb/pkg/core"
|
||||||
|
"github.com/vulcanize/vulcanizedb/pkg/filters"
|
||||||
"github.com/jmoiron/sqlx"
|
"github.com/jmoiron/sqlx"
|
||||||
_ "github.com/lib/pq"
|
_ "github.com/lib/pq"
|
||||||
)
|
)
|
||||||
@ -62,35 +63,6 @@ func (repository Postgres) SetBlocksStatus(chainHead int64) {
|
|||||||
cutoff)
|
cutoff)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (repository Postgres) CreateLogs(logs []core.Log) error {
|
|
||||||
tx, _ := repository.Db.BeginTx(context.Background(), nil)
|
|
||||||
for _, tlog := range logs {
|
|
||||||
_, err := tx.Exec(
|
|
||||||
`INSERT INTO logs (block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data)
|
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
|
|
||||||
ON CONFLICT (index, block_number)
|
|
||||||
DO UPDATE
|
|
||||||
SET block_number = $1,
|
|
||||||
address = $2,
|
|
||||||
tx_hash = $3,
|
|
||||||
index = $4,
|
|
||||||
topic0 = $5,
|
|
||||||
topic1 = $6,
|
|
||||||
topic2 = $7,
|
|
||||||
topic3 = $8,
|
|
||||||
data = $9
|
|
||||||
`,
|
|
||||||
tlog.BlockNumber, tlog.Address, tlog.TxHash, tlog.Index, tlog.Topics[0], tlog.Topics[1], tlog.Topics[2], tlog.Topics[3], tlog.Data,
|
|
||||||
)
|
|
||||||
if err != nil {
|
|
||||||
tx.Rollback()
|
|
||||||
return ErrDBInsertFailed
|
|
||||||
}
|
|
||||||
}
|
|
||||||
tx.Commit()
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (repository Postgres) FindLogs(address string, blockNumber int64) []core.Log {
|
func (repository Postgres) FindLogs(address string, blockNumber int64) []core.Log {
|
||||||
logRows, _ := repository.Db.Query(
|
logRows, _ := repository.Db.Query(
|
||||||
`SELECT block_number,
|
`SELECT block_number,
|
||||||
@ -111,13 +83,16 @@ func (repository Postgres) FindLogs(address string, blockNumber int64) []core.Lo
|
|||||||
func (repository *Postgres) CreateNode(node *core.Node) error {
|
func (repository *Postgres) CreateNode(node *core.Node) error {
|
||||||
var nodeId int64
|
var nodeId int64
|
||||||
err := repository.Db.QueryRow(
|
err := repository.Db.QueryRow(
|
||||||
`INSERT INTO nodes (genesis_block, network_id)
|
`INSERT INTO nodes (genesis_block, network_id, node_id, client_name)
|
||||||
VALUES ($1, $2)
|
VALUES ($1, $2, $3, $4)
|
||||||
ON CONFLICT (genesis_block, network_id)
|
ON CONFLICT (genesis_block, network_id, node_id)
|
||||||
DO UPDATE
|
DO UPDATE
|
||||||
SET genesis_block = $1, network_id = $2
|
SET genesis_block = $1,
|
||||||
|
network_id = $2,
|
||||||
|
node_id = $3,
|
||||||
|
client_name = $4
|
||||||
RETURNING id`,
|
RETURNING id`,
|
||||||
node.GenesisBlock, node.NetworkId).Scan(&nodeId)
|
node.GenesisBlock, node.NetworkId, node.Id, node.ClientName).Scan(&nodeId)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return ErrUnableToSetNode
|
return ErrUnableToSetNode
|
||||||
}
|
}
|
||||||
@ -329,29 +304,90 @@ func (repository Postgres) createTransaction(tx *sql.Tx, blockId int64, transact
|
|||||||
err := tx.QueryRow(
|
err := tx.QueryRow(
|
||||||
`INSERT INTO transactions
|
`INSERT INTO transactions
|
||||||
(block_id, tx_hash, tx_nonce, tx_to, tx_from, tx_gaslimit, tx_gasprice, tx_value, tx_input_data)
|
(block_id, tx_hash, tx_nonce, tx_to, tx_from, tx_gaslimit, tx_gasprice, tx_value, tx_input_data)
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
|
VALUES ($1, $2, $3, $4, $5, $6, $7, cast(NULLIF($8, '') AS NUMERIC), $9)
|
||||||
RETURNING id`,
|
RETURNING id`,
|
||||||
blockId, transaction.Hash, transaction.Nonce, transaction.To, transaction.From, transaction.GasLimit, transaction.GasPrice, transaction.Value, transaction.Data).
|
blockId, transaction.Hash, transaction.Nonce, transaction.To, transaction.From, transaction.GasLimit, transaction.GasPrice, transaction.Value, transaction.Data).
|
||||||
Scan(&transactionId)
|
Scan(&transactionId)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if transaction.Receipt.TxHash != "" {
|
if hasReceipt(transaction) {
|
||||||
err = repository.createReceipt(tx, transactionId, transaction.Receipt)
|
receiptId, err := repository.createReceipt(tx, transactionId, transaction.Receipt)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
if hasLogs(transaction) {
|
||||||
|
err = repository.createLogs(tx, transaction.Receipt.Logs, receiptId)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (repository Postgres) createReceipt(tx *sql.Tx, transactionId int, receipt core.Receipt) error {
|
func hasLogs(transaction core.Transaction) bool {
|
||||||
|
return len(transaction.Receipt.Logs) > 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func hasReceipt(transaction core.Transaction) bool {
|
||||||
|
return transaction.Receipt.TxHash != ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func (repository Postgres) createReceipt(tx *sql.Tx, transactionId int, receipt core.Receipt) (int, error) {
|
||||||
//Not currently persisting log bloom filters
|
//Not currently persisting log bloom filters
|
||||||
_, err := tx.Exec(
|
var receiptId int
|
||||||
|
err := tx.QueryRow(
|
||||||
`INSERT INTO receipts
|
`INSERT INTO receipts
|
||||||
(contract_address, tx_hash, cumulative_gas_used, gas_used, state_root, status, transaction_id)
|
(contract_address, tx_hash, cumulative_gas_used, gas_used, state_root, status, transaction_id)
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, $7)`,
|
VALUES ($1, $2, $3, $4, $5, $6, $7)
|
||||||
receipt.ContractAddress, receipt.TxHash, receipt.CumulativeGasUsed, receipt.GasUsed, receipt.StateRoot, receipt.Status, transactionId)
|
RETURNING id`,
|
||||||
|
receipt.ContractAddress, receipt.TxHash, receipt.CumulativeGasUsed, receipt.GasUsed, receipt.StateRoot, receipt.Status, transactionId).Scan(&receiptId)
|
||||||
|
if err != nil {
|
||||||
|
return receiptId, err
|
||||||
|
}
|
||||||
|
return receiptId, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (repository Postgres) createLogs(tx *sql.Tx, logs []core.Log, receiptId int) error {
|
||||||
|
for _, tlog := range logs {
|
||||||
|
_, err := tx.Exec(
|
||||||
|
`INSERT INTO logs (block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data, receipt_id)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
|
||||||
|
`,
|
||||||
|
tlog.BlockNumber, tlog.Address, tlog.TxHash, tlog.Index, tlog.Topics[0], tlog.Topics[1], tlog.Topics[2], tlog.Topics[3], tlog.Data, receiptId,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return ErrDBInsertFailed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (repository Postgres) CreateLogs(logs []core.Log) error {
|
||||||
|
tx, _ := repository.Db.BeginTx(context.Background(), nil)
|
||||||
|
for _, tlog := range logs {
|
||||||
|
_, err := tx.Exec(
|
||||||
|
`INSERT INTO logs (block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
|
||||||
|
`,
|
||||||
|
tlog.BlockNumber, tlog.Address, tlog.TxHash, tlog.Index, tlog.Topics[0], tlog.Topics[1], tlog.Topics[2], tlog.Topics[3], tlog.Data,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
tx.Rollback()
|
||||||
|
return ErrDBInsertFailed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tx.Commit()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (repository Postgres) AddFilter(query filters.LogFilter) error {
|
||||||
|
_, err := repository.Db.Exec(
|
||||||
|
`INSERT INTO log_filters
|
||||||
|
(name, from_block, to_block, address, topic0, topic1, topic2, topic3)
|
||||||
|
VALUES ($1, NULLIF($2, -1), NULLIF($3, -1), $4, NULLIF($5, ''), NULLIF($6, ''), NULLIF($7, ''), NULLIF($8, ''))`,
|
||||||
|
query.Name, query.FromBlock, query.ToBlock, query.Address, query.Topics[0], query.Topics[1], query.Topics[2], query.Topics[3])
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -439,7 +475,7 @@ func (repository Postgres) loadLogs(logsRows *sql.Rows) []core.Log {
|
|||||||
var txHash string
|
var txHash string
|
||||||
var index int64
|
var index int64
|
||||||
var data string
|
var data string
|
||||||
topics := make([]string, 4)
|
var topics core.Topics
|
||||||
logsRows.Scan(&blockNumber, &address, &txHash, &index, &topics[0], &topics[1], &topics[2], &topics[3], &data)
|
logsRows.Scan(&blockNumber, &address, &txHash, &index, &topics[0], &topics[1], &topics[2], &topics[3], &data)
|
||||||
log := core.Log{
|
log := core.Log{
|
||||||
BlockNumber: blockNumber,
|
BlockNumber: blockNumber,
|
||||||
@ -448,7 +484,6 @@ func (repository Postgres) loadLogs(logsRows *sql.Rows) []core.Log {
|
|||||||
Index: index,
|
Index: index,
|
||||||
Data: data,
|
Data: data,
|
||||||
}
|
}
|
||||||
log.Topics = make(map[int]string)
|
|
||||||
for i, topic := range topics {
|
for i, topic := range topics {
|
||||||
log.Topics[i] = topic
|
log.Topics[i] = topic
|
||||||
}
|
}
|
||||||
@ -467,7 +502,7 @@ func (repository Postgres) loadTransactions(transactionRows *sql.Rows) []core.Tr
|
|||||||
var gasLimit int64
|
var gasLimit int64
|
||||||
var gasPrice int64
|
var gasPrice int64
|
||||||
var inputData string
|
var inputData string
|
||||||
var value int64
|
var value string
|
||||||
transactionRows.Scan(&hash, &nonce, &to, &from, &gasLimit, &gasPrice, &value, &inputData)
|
transactionRows.Scan(&hash, &nonce, &to, &from, &gasLimit, &gasPrice, &value, &inputData)
|
||||||
transaction := core.Transaction{
|
transaction := core.Transaction{
|
||||||
Hash: hash,
|
Hash: hash,
|
||||||
|
@ -7,6 +7,8 @@ import (
|
|||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
"log"
|
"log"
|
||||||
|
|
||||||
|
"math/big"
|
||||||
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/config"
|
"github.com/vulcanize/vulcanizedb/pkg/config"
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/core"
|
"github.com/vulcanize/vulcanizedb/pkg/core"
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/repositories"
|
"github.com/vulcanize/vulcanizedb/pkg/repositories"
|
||||||
@ -38,6 +40,42 @@ var _ = Describe("Postgres repository", func() {
|
|||||||
return repository
|
return repository
|
||||||
})
|
})
|
||||||
|
|
||||||
|
It("serializes big.Int to db", func() {
|
||||||
|
// postgres driver doesn't support go big.Int type
|
||||||
|
// various casts in golang uint64, int64, overflow for
|
||||||
|
// transaction value (in wei) even though
|
||||||
|
// postgres numeric can handle an arbitrary
|
||||||
|
// sized int, so use string representation of big.Int
|
||||||
|
// and cast on insert
|
||||||
|
|
||||||
|
cfg, _ := config.NewConfig("private")
|
||||||
|
pgConfig := config.DbConnectionString(cfg.Database)
|
||||||
|
db, err := sqlx.Connect("postgres", pgConfig)
|
||||||
|
|
||||||
|
bi := new(big.Int)
|
||||||
|
bi.SetString("34940183920000000000", 10)
|
||||||
|
Expect(bi.String()).To(Equal("34940183920000000000"))
|
||||||
|
|
||||||
|
defer db.Exec(`DROP TABLE IF EXISTS example`)
|
||||||
|
_, err = db.Exec("CREATE TABLE example ( id INTEGER, data NUMERIC )")
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
|
||||||
|
sqlStatement := `
|
||||||
|
INSERT INTO example (id, data)
|
||||||
|
VALUES (1, cast($1 AS NUMERIC))`
|
||||||
|
_, err = db.Exec(sqlStatement, bi.String())
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
|
||||||
|
var data string
|
||||||
|
err = db.QueryRow(`SELECT data FROM example WHERE id = 1`).Scan(&data)
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
|
||||||
|
Expect(bi.String()).To(Equal(data))
|
||||||
|
actual := new(big.Int)
|
||||||
|
actual.SetString(data, 10)
|
||||||
|
Expect(actual).To(Equal(bi))
|
||||||
|
})
|
||||||
|
|
||||||
It("does not commit block if block is invalid", func() {
|
It("does not commit block if block is invalid", func() {
|
||||||
//badNonce violates db Nonce field length
|
//badNonce violates db Nonce field length
|
||||||
badNonce := fmt.Sprintf("x %s", strings.Repeat("1", 100))
|
badNonce := fmt.Sprintf("x %s", strings.Repeat("1", 100))
|
||||||
@ -47,7 +85,7 @@ var _ = Describe("Postgres repository", func() {
|
|||||||
Transactions: []core.Transaction{},
|
Transactions: []core.Transaction{},
|
||||||
}
|
}
|
||||||
cfg, _ := config.NewConfig("private")
|
cfg, _ := config.NewConfig("private")
|
||||||
node := core.Node{GenesisBlock: "GENESIS", NetworkId: 1}
|
node := core.Node{GenesisBlock: "GENESIS", NetworkId: 1, Id: "x123", ClientName: "geth"}
|
||||||
repository, _ := repositories.NewPostgres(cfg.Database, node)
|
repository, _ := repositories.NewPostgres(cfg.Database, node)
|
||||||
|
|
||||||
err1 := repository.CreateOrUpdateBlock(badBlock)
|
err1 := repository.CreateOrUpdateBlock(badBlock)
|
||||||
@ -60,7 +98,7 @@ var _ = Describe("Postgres repository", func() {
|
|||||||
|
|
||||||
It("throws error when can't connect to the database", func() {
|
It("throws error when can't connect to the database", func() {
|
||||||
invalidDatabase := config.Database{}
|
invalidDatabase := config.Database{}
|
||||||
node := core.Node{GenesisBlock: "GENESIS", NetworkId: 1}
|
node := core.Node{GenesisBlock: "GENESIS", NetworkId: 1, Id: "x123", ClientName: "geth"}
|
||||||
_, err := repositories.NewPostgres(invalidDatabase, node)
|
_, err := repositories.NewPostgres(invalidDatabase, node)
|
||||||
Expect(err).To(Equal(repositories.ErrDBConnectionFailed))
|
Expect(err).To(Equal(repositories.ErrDBConnectionFailed))
|
||||||
})
|
})
|
||||||
@ -68,7 +106,7 @@ var _ = Describe("Postgres repository", func() {
|
|||||||
It("throws error when can't create node", func() {
|
It("throws error when can't create node", func() {
|
||||||
cfg, _ := config.NewConfig("private")
|
cfg, _ := config.NewConfig("private")
|
||||||
badHash := fmt.Sprintf("x %s", strings.Repeat("1", 100))
|
badHash := fmt.Sprintf("x %s", strings.Repeat("1", 100))
|
||||||
node := core.Node{GenesisBlock: badHash, NetworkId: 1}
|
node := core.Node{GenesisBlock: badHash, NetworkId: 1, Id: "x123", ClientName: "geth"}
|
||||||
_, err := repositories.NewPostgres(cfg.Database, node)
|
_, err := repositories.NewPostgres(cfg.Database, node)
|
||||||
Expect(err).To(Equal(repositories.ErrUnableToSetNode))
|
Expect(err).To(Equal(repositories.ErrUnableToSetNode))
|
||||||
})
|
})
|
||||||
@ -82,7 +120,7 @@ var _ = Describe("Postgres repository", func() {
|
|||||||
TxHash: badTxHash,
|
TxHash: badTxHash,
|
||||||
}
|
}
|
||||||
cfg, _ := config.NewConfig("private")
|
cfg, _ := config.NewConfig("private")
|
||||||
node := core.Node{GenesisBlock: "GENESIS", NetworkId: 1}
|
node := core.Node{GenesisBlock: "GENESIS", NetworkId: 1, Id: "x123", ClientName: "geth"}
|
||||||
repository, _ := repositories.NewPostgres(cfg.Database, node)
|
repository, _ := repositories.NewPostgres(cfg.Database, node)
|
||||||
|
|
||||||
err := repository.CreateLogs([]core.Log{badLog})
|
err := repository.CreateLogs([]core.Log{badLog})
|
||||||
@ -101,7 +139,7 @@ var _ = Describe("Postgres repository", func() {
|
|||||||
Transactions: []core.Transaction{badTransaction},
|
Transactions: []core.Transaction{badTransaction},
|
||||||
}
|
}
|
||||||
cfg, _ := config.NewConfig("private")
|
cfg, _ := config.NewConfig("private")
|
||||||
node := core.Node{GenesisBlock: "GENESIS", NetworkId: 1}
|
node := core.Node{GenesisBlock: "GENESIS", NetworkId: 1, Id: "x123", ClientName: "geth"}
|
||||||
repository, _ := repositories.NewPostgres(cfg.Database, node)
|
repository, _ := repositories.NewPostgres(cfg.Database, node)
|
||||||
|
|
||||||
err1 := repository.CreateOrUpdateBlock(block)
|
err1 := repository.CreateOrUpdateBlock(block)
|
||||||
|
@ -1,6 +1,9 @@
|
|||||||
package repositories
|
package repositories
|
||||||
|
|
||||||
import "github.com/vulcanize/vulcanizedb/pkg/core"
|
import (
|
||||||
|
"github.com/vulcanize/vulcanizedb/pkg/core"
|
||||||
|
"github.com/vulcanize/vulcanizedb/pkg/filters"
|
||||||
|
)
|
||||||
|
|
||||||
const (
|
const (
|
||||||
blocksFromHeadBeforeFinal = 20
|
blocksFromHeadBeforeFinal = 20
|
||||||
@ -19,4 +22,5 @@ type Repository interface {
|
|||||||
CreateLogs(log []core.Log) error
|
CreateLogs(log []core.Log) error
|
||||||
FindLogs(address string, blockNumber int64) []core.Log
|
FindLogs(address string, blockNumber int64) []core.Log
|
||||||
SetBlocksStatus(chainHead int64)
|
SetBlocksStatus(chainHead int64)
|
||||||
|
AddFilter(filter filters.LogFilter) error
|
||||||
}
|
}
|
||||||
|
@ -4,7 +4,10 @@ import (
|
|||||||
"sort"
|
"sort"
|
||||||
"strconv"
|
"strconv"
|
||||||
|
|
||||||
|
"math/big"
|
||||||
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/core"
|
"github.com/vulcanize/vulcanizedb/pkg/core"
|
||||||
|
"github.com/vulcanize/vulcanizedb/pkg/filters"
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/repositories"
|
"github.com/vulcanize/vulcanizedb/pkg/repositories"
|
||||||
. "github.com/onsi/ginkgo"
|
. "github.com/onsi/ginkgo"
|
||||||
. "github.com/onsi/gomega"
|
. "github.com/onsi/gomega"
|
||||||
@ -16,13 +19,19 @@ func ClearData(postgres repositories.Postgres) {
|
|||||||
postgres.Db.MustExec("DELETE FROM blocks")
|
postgres.Db.MustExec("DELETE FROM blocks")
|
||||||
postgres.Db.MustExec("DELETE FROM logs")
|
postgres.Db.MustExec("DELETE FROM logs")
|
||||||
postgres.Db.MustExec("DELETE FROM receipts")
|
postgres.Db.MustExec("DELETE FROM receipts")
|
||||||
|
postgres.Db.MustExec("DELETE FROM log_filters")
|
||||||
}
|
}
|
||||||
|
|
||||||
func AssertRepositoryBehavior(buildRepository func(node core.Node) repositories.Repository) {
|
func AssertRepositoryBehavior(buildRepository func(node core.Node) repositories.Repository) {
|
||||||
var repository repositories.Repository
|
var repository repositories.Repository
|
||||||
|
|
||||||
BeforeEach(func() {
|
BeforeEach(func() {
|
||||||
node := core.Node{GenesisBlock: "GENESIS", NetworkId: 1}
|
node := core.Node{
|
||||||
|
GenesisBlock: "GENESIS",
|
||||||
|
NetworkId: 1,
|
||||||
|
Id: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845",
|
||||||
|
ClientName: "Geth/v1.7.2-stable-1db4ecdc/darwin-amd64/go1.9",
|
||||||
|
}
|
||||||
repository = buildRepository(node)
|
repository = buildRepository(node)
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -48,6 +57,8 @@ func AssertRepositoryBehavior(buildRepository func(node core.Node) repositories.
|
|||||||
nodeTwo := core.Node{
|
nodeTwo := core.Node{
|
||||||
GenesisBlock: "0x456",
|
GenesisBlock: "0x456",
|
||||||
NetworkId: 1,
|
NetworkId: 1,
|
||||||
|
Id: "x123456",
|
||||||
|
ClientName: "Geth",
|
||||||
}
|
}
|
||||||
repositoryTwo := buildRepository(nodeTwo)
|
repositoryTwo := buildRepository(nodeTwo)
|
||||||
|
|
||||||
@ -191,7 +202,8 @@ func AssertRepositoryBehavior(buildRepository func(node core.Node) repositories.
|
|||||||
nonce := uint64(10000)
|
nonce := uint64(10000)
|
||||||
to := "1234567890"
|
to := "1234567890"
|
||||||
from := "0987654321"
|
from := "0987654321"
|
||||||
value := int64(10)
|
var value = new(big.Int)
|
||||||
|
value.SetString("34940183920000000000", 10)
|
||||||
inputData := "0xf7d8c8830000000000000000000000000000000000000000000000000000000000037788000000000000000000000000000000000000000000000000000000000003bd14"
|
inputData := "0xf7d8c8830000000000000000000000000000000000000000000000000000000000037788000000000000000000000000000000000000000000000000000000000003bd14"
|
||||||
transaction := core.Transaction{
|
transaction := core.Transaction{
|
||||||
Hash: "x1234",
|
Hash: "x1234",
|
||||||
@ -200,7 +212,7 @@ func AssertRepositoryBehavior(buildRepository func(node core.Node) repositories.
|
|||||||
Nonce: nonce,
|
Nonce: nonce,
|
||||||
To: to,
|
To: to,
|
||||||
From: from,
|
From: from,
|
||||||
Value: value,
|
Value: value.String(),
|
||||||
Data: inputData,
|
Data: inputData,
|
||||||
}
|
}
|
||||||
block := core.Block{
|
block := core.Block{
|
||||||
@ -220,7 +232,7 @@ func AssertRepositoryBehavior(buildRepository func(node core.Node) repositories.
|
|||||||
Expect(savedTransaction.Nonce).To(Equal(nonce))
|
Expect(savedTransaction.Nonce).To(Equal(nonce))
|
||||||
Expect(savedTransaction.GasLimit).To(Equal(gasLimit))
|
Expect(savedTransaction.GasLimit).To(Equal(gasLimit))
|
||||||
Expect(savedTransaction.GasPrice).To(Equal(gasPrice))
|
Expect(savedTransaction.GasPrice).To(Equal(gasPrice))
|
||||||
Expect(savedTransaction.Value).To(Equal(value))
|
Expect(savedTransaction.Value).To(Equal(value.String()))
|
||||||
})
|
})
|
||||||
|
|
||||||
})
|
})
|
||||||
@ -392,7 +404,7 @@ func AssertRepositoryBehavior(buildRepository func(node core.Node) repositories.
|
|||||||
Index: 0,
|
Index: 0,
|
||||||
Address: "x123",
|
Address: "x123",
|
||||||
TxHash: "x456",
|
TxHash: "x456",
|
||||||
Topics: map[int]string{0: "x777", 1: "x888", 2: "x999"},
|
Topics: core.Topics{0: "x777", 1: "x888", 2: "x999"},
|
||||||
Data: "xabc",
|
Data: "xabc",
|
||||||
}},
|
}},
|
||||||
)
|
)
|
||||||
@ -415,37 +427,13 @@ func AssertRepositoryBehavior(buildRepository func(node core.Node) repositories.
|
|||||||
Expect(log).To(BeNil())
|
Expect(log).To(BeNil())
|
||||||
})
|
})
|
||||||
|
|
||||||
It("updates the log when log with when log with same block number and index is already present", func() {
|
|
||||||
repository.CreateLogs([]core.Log{{
|
|
||||||
BlockNumber: 1,
|
|
||||||
Index: 0,
|
|
||||||
Address: "x123",
|
|
||||||
TxHash: "x456",
|
|
||||||
Topics: map[int]string{0: "x777", 1: "x888", 2: "x999"},
|
|
||||||
Data: "xABC",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
repository.CreateLogs([]core.Log{{
|
|
||||||
BlockNumber: 1,
|
|
||||||
Index: 0,
|
|
||||||
Address: "x123",
|
|
||||||
TxHash: "x456",
|
|
||||||
Topics: map[int]string{0: "x777", 1: "x888", 2: "x999"},
|
|
||||||
Data: "xXYZ",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
log := repository.FindLogs("x123", 1)
|
|
||||||
Expect(log[0].Data).To(Equal("xXYZ"))
|
|
||||||
})
|
|
||||||
|
|
||||||
It("filters to the correct block number and address", func() {
|
It("filters to the correct block number and address", func() {
|
||||||
repository.CreateLogs([]core.Log{{
|
repository.CreateLogs([]core.Log{{
|
||||||
BlockNumber: 1,
|
BlockNumber: 1,
|
||||||
Index: 0,
|
Index: 0,
|
||||||
Address: "x123",
|
Address: "x123",
|
||||||
TxHash: "x456",
|
TxHash: "x456",
|
||||||
Topics: map[int]string{0: "x777", 1: "x888", 2: "x999"},
|
Topics: core.Topics{0: "x777", 1: "x888", 2: "x999"},
|
||||||
Data: "xabc",
|
Data: "xabc",
|
||||||
}},
|
}},
|
||||||
)
|
)
|
||||||
@ -454,7 +442,7 @@ func AssertRepositoryBehavior(buildRepository func(node core.Node) repositories.
|
|||||||
Index: 1,
|
Index: 1,
|
||||||
Address: "x123",
|
Address: "x123",
|
||||||
TxHash: "x789",
|
TxHash: "x789",
|
||||||
Topics: map[int]string{0: "x111", 1: "x222", 2: "x333"},
|
Topics: core.Topics{0: "x111", 1: "x222", 2: "x333"},
|
||||||
Data: "xdef",
|
Data: "xdef",
|
||||||
}},
|
}},
|
||||||
)
|
)
|
||||||
@ -463,7 +451,7 @@ func AssertRepositoryBehavior(buildRepository func(node core.Node) repositories.
|
|||||||
Index: 0,
|
Index: 0,
|
||||||
Address: "x123",
|
Address: "x123",
|
||||||
TxHash: "x456",
|
TxHash: "x456",
|
||||||
Topics: map[int]string{0: "x777", 1: "x888", 2: "x999"},
|
Topics: core.Topics{0: "x777", 1: "x888", 2: "x999"},
|
||||||
Data: "xabc",
|
Data: "xabc",
|
||||||
}},
|
}},
|
||||||
)
|
)
|
||||||
@ -497,6 +485,85 @@ func AssertRepositoryBehavior(buildRepository func(node core.Node) repositories.
|
|||||||
{blockNumber: 1, Index: 1}},
|
{blockNumber: 1, Index: 1}},
|
||||||
))
|
))
|
||||||
})
|
})
|
||||||
|
|
||||||
|
It("saves the logs attached to a receipt", func() {
|
||||||
|
logs := []core.Log{{
|
||||||
|
Address: "0x8a4774fe82c63484afef97ca8d89a6ea5e21f973",
|
||||||
|
BlockNumber: 4745407,
|
||||||
|
Data: "0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000645a68669900000000000000000000000000000000000000000000003397684ab5869b0000000000000000000000000000000000000000000000000000000000005a36053200000000000000000000000099041f808d598b782d5a3e498681c2452a31da08",
|
||||||
|
Index: 86,
|
||||||
|
Topics: core.Topics{
|
||||||
|
0: "0x5a68669900000000000000000000000000000000000000000000000000000000",
|
||||||
|
1: "0x000000000000000000000000d0148dad63f73ce6f1b6c607e3413dcf1ff5f030",
|
||||||
|
2: "0x00000000000000000000000000000000000000000000003397684ab5869b0000",
|
||||||
|
3: "0x000000000000000000000000000000000000000000000000000000005a360532",
|
||||||
|
},
|
||||||
|
TxHash: "0x002c4799161d809b23f67884eb6598c9df5894929fe1a9ead97ca175d360f547",
|
||||||
|
}, {
|
||||||
|
Address: "0x99041f808d598b782d5a3e498681c2452a31da08",
|
||||||
|
BlockNumber: 4745407,
|
||||||
|
Data: "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000418178358",
|
||||||
|
Index: 87,
|
||||||
|
Topics: core.Topics{
|
||||||
|
0: "0x1817835800000000000000000000000000000000000000000000000000000000",
|
||||||
|
1: "0x0000000000000000000000008a4774fe82c63484afef97ca8d89a6ea5e21f973",
|
||||||
|
2: "0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||||
|
3: "0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||||
|
},
|
||||||
|
TxHash: "0x002c4799161d809b23f67884eb6598c9df5894929fe1a9ead97ca175d360f547",
|
||||||
|
}, {
|
||||||
|
Address: "0x99041f808d598b782d5a3e498681c2452a31da08",
|
||||||
|
BlockNumber: 4745407,
|
||||||
|
Data: "0x00000000000000000000000000000000000000000000003338f64c8423af4000",
|
||||||
|
Index: 88,
|
||||||
|
Topics: core.Topics{
|
||||||
|
0: "0x296ba4ca62c6c21c95e828080cb8aec7481b71390585605300a8a76f9e95b527",
|
||||||
|
},
|
||||||
|
TxHash: "0x002c4799161d809b23f67884eb6598c9df5894929fe1a9ead97ca175d360f547",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
receipt := core.Receipt{
|
||||||
|
ContractAddress: "",
|
||||||
|
CumulativeGasUsed: 7481414,
|
||||||
|
GasUsed: 60711,
|
||||||
|
Logs: logs,
|
||||||
|
Bloom: "0x00000800000000000000001000000000000000400000000080000000000000000000400000010000000000000000000000000000040000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000800004000000000000001000000000000000000000000000002000000480000000000000002000000000000000020000000000000000000000000000000000000000080000000000180000c00000000000000002000002000000040000000000000000000000000000010000000000020000000000000000000002000000000000000000000000400800000000000000000",
|
||||||
|
Status: 1,
|
||||||
|
TxHash: "0x002c4799161d809b23f67884eb6598c9df5894929fe1a9ead97ca175d360f547",
|
||||||
|
}
|
||||||
|
transaction :=
|
||||||
|
core.Transaction{
|
||||||
|
Hash: receipt.TxHash,
|
||||||
|
Receipt: receipt,
|
||||||
|
}
|
||||||
|
|
||||||
|
block := core.Block{Transactions: []core.Transaction{transaction}}
|
||||||
|
err := repository.CreateOrUpdateBlock(block)
|
||||||
|
Expect(err).To(Not(HaveOccurred()))
|
||||||
|
retrievedLogs := repository.FindLogs("0x99041f808d598b782d5a3e498681c2452a31da08", 4745407)
|
||||||
|
|
||||||
|
expected := logs[1:]
|
||||||
|
Expect(retrievedLogs).To(Equal(expected))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("still saves receipts without logs", func() {
|
||||||
|
receipt := core.Receipt{
|
||||||
|
TxHash: "0x002c4799161d809b23f67884eb6598c9df5894929fe1a9ead97ca175d360f547",
|
||||||
|
}
|
||||||
|
transaction := core.Transaction{
|
||||||
|
Hash: receipt.TxHash,
|
||||||
|
Receipt: receipt,
|
||||||
|
}
|
||||||
|
|
||||||
|
block := core.Block{
|
||||||
|
Transactions: []core.Transaction{transaction},
|
||||||
|
}
|
||||||
|
repository.CreateOrUpdateBlock(block)
|
||||||
|
|
||||||
|
_, err := repository.FindReceipt(receipt.TxHash)
|
||||||
|
|
||||||
|
Expect(err).To(Not(HaveOccurred()))
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
Describe("Saving receipts", func() {
|
Describe("Saving receipts", func() {
|
||||||
@ -535,6 +602,43 @@ func AssertRepositoryBehavior(buildRepository func(node core.Node) repositories.
|
|||||||
Expect(err).To(HaveOccurred())
|
Expect(err).To(HaveOccurred())
|
||||||
Expect(receipt).To(BeZero())
|
Expect(receipt).To(BeZero())
|
||||||
})
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("LogFilter", func() {
|
||||||
|
|
||||||
|
It("inserts filter into watched events", func() {
|
||||||
|
|
||||||
|
logFilter := filters.LogFilter{
|
||||||
|
Name: "TestFilter",
|
||||||
|
FromBlock: 1,
|
||||||
|
ToBlock: 2,
|
||||||
|
Address: "0x8888f1f195afa192cfee860698584c030f4c9db1",
|
||||||
|
Topics: core.Topics{
|
||||||
|
"0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b",
|
||||||
|
"",
|
||||||
|
"0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b",
|
||||||
|
"",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
err := repository.AddFilter(logFilter)
|
||||||
|
Expect(err).ToNot(HaveOccurred())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("returns error if name is not provided", func() {
|
||||||
|
|
||||||
|
logFilter := filters.LogFilter{
|
||||||
|
FromBlock: 1,
|
||||||
|
ToBlock: 2,
|
||||||
|
Address: "0x8888f1f195afa192cfee860698584c030f4c9db1",
|
||||||
|
Topics: core.Topics{
|
||||||
|
"0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b",
|
||||||
|
"",
|
||||||
|
"0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b",
|
||||||
|
"",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
err := repository.AddFilter(logFilter)
|
||||||
|
Expect(err).To(HaveOccurred())
|
||||||
|
})
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -1,14 +1,14 @@
|
|||||||
package cmd
|
package utils
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"log"
|
"log"
|
||||||
|
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"math/big"
|
"math/big"
|
||||||
|
|
||||||
|
"os"
|
||||||
|
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/config"
|
"github.com/vulcanize/vulcanizedb/pkg/config"
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/core"
|
"github.com/vulcanize/vulcanizedb/pkg/core"
|
||||||
"github.com/vulcanize/vulcanizedb/pkg/geth"
|
"github.com/vulcanize/vulcanizedb/pkg/geth"
|
||||||
@ -32,9 +32,7 @@ func LoadPostgres(database config.Database, node core.Node) repositories.Postgre
|
|||||||
}
|
}
|
||||||
|
|
||||||
func ReadAbiFile(abiFilepath string) string {
|
func ReadAbiFile(abiFilepath string) string {
|
||||||
if !filepath.IsAbs(abiFilepath) {
|
abiFilepath = AbsFilePath(abiFilepath)
|
||||||
abiFilepath = filepath.Join(config.ProjectRoot(), abiFilepath)
|
|
||||||
}
|
|
||||||
abi, err := geth.ReadAbiFile(abiFilepath)
|
abi, err := geth.ReadAbiFile(abiFilepath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("Error reading ABI file at \"%s\"\n %v", abiFilepath, err)
|
log.Fatalf("Error reading ABI file at \"%s\"\n %v", abiFilepath, err)
|
||||||
@ -42,13 +40,22 @@ func ReadAbiFile(abiFilepath string) string {
|
|||||||
return abi
|
return abi
|
||||||
}
|
}
|
||||||
|
|
||||||
func GetAbi(abiFilepath string, contractHash string) string {
|
func AbsFilePath(filePath string) string {
|
||||||
|
if !filepath.IsAbs(filePath) {
|
||||||
|
cwd, _ := os.Getwd()
|
||||||
|
filePath = filepath.Join(cwd, filePath)
|
||||||
|
}
|
||||||
|
return filePath
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetAbi(abiFilepath string, contractHash string, network string) string {
|
||||||
var contractAbiString string
|
var contractAbiString string
|
||||||
if abiFilepath != "" {
|
if abiFilepath != "" {
|
||||||
contractAbiString = ReadAbiFile(abiFilepath)
|
contractAbiString = ReadAbiFile(abiFilepath)
|
||||||
} else {
|
} else {
|
||||||
etherscan := geth.NewEtherScanClient("https://api.etherscan.io")
|
url := geth.GenUrl(network)
|
||||||
fmt.Println("No ABI supplied. Retrieving ABI from Etherscan")
|
etherscan := geth.NewEtherScanClient(url)
|
||||||
|
log.Printf("No ABI supplied. Retrieving ABI from Etherscan: %s", url)
|
||||||
contractAbiString, _ = etherscan.GetAbi(contractHash)
|
contractAbiString, _ = etherscan.GetAbi(contractHash)
|
||||||
}
|
}
|
||||||
_, err := geth.ParseAbi(contractAbiString)
|
_, err := geth.ParseAbi(contractAbiString)
|
Loading…
Reference in New Issue
Block a user