Merge pull request #74 from vulcanize/staging

Merge major updates
Ian Norden 2019-04-09 13:42:15 -05:00 committed by GitHub
commit c7a2e85d67
2590 changed files with 296379 additions and 159888 deletions

.dockerignore Normal file (16 lines changed)

@@ -0,0 +1,16 @@
.git
.travis.yml
.idea
bin
.gitignore
integration_test
LICENSE
postgraphile
.private_blockchain_password
README.md
scripts
Supfile
test_config
.travis.yml
vulcanizedb.log
Dockerfile

.gitignore vendored (7 lines changed)

@@ -1,4 +1,5 @@
.idea
.vscode
test_data_dir/
contracts/*
environments/*.toml
@@ -7,3 +8,9 @@ vagrant*.sh
.vagrant
test_scripts/
vulcanizedb
postgraphile/build/
postgraphile/node_modules/
postgraphile/package-lock.json
vulcanizedb.log
db/migrations/20*.sql
plugins/*.so


@@ -1 +0,0 @@
1234


@@ -5,21 +5,25 @@ go:
services:
- postgresql
addons:
postgresql: "9.6"
postgresql: '9.6'
go_import_path: github.com/vulcanize/vulcanizedb
before_install:
# ginkgo golint dep migrate
- make installtools
- bash ./scripts/install-postgres-10.sh
- curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add -
- echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list
- sudo apt-get update && sudo apt-get install yarn
before_script:
- sudo -u postgres createdb vulcanize_private
- make migrate NAME=vulcanize_private
- cd postgraphile && yarn
script:
- yarn test
- cd ../
- make test
- make integrationtest
notifications:
email: false
env:
matrix:
secure: GA4GiMHy868UD/C+59jRUOdJxxdLSMauNapPA31AODUMv0lcEOXg6TBJkv6Vn3OOrLHYYSpwCGiGQjF86U2vpKWzD1mSITalPfP/kNpFnrHVNf2oPpfrQs8ccs9gnFpbCNLDy0w2+55pgnrd5hvdnlsTEUS1lHWSKEZmWsSqUeS/dw22IS0+nUja0Pc3pHaWlJz2R502uEKIixjq1fOX65Z/9r1BrMqklOwWrABK0GwgYcDZwc+b3iovyDPkDa3sT/Ddz/srkcYi/Y182zdLdrknj3MVR3GiW5iSqIVm6gr427SFXF4EQVwkEBwAGoo83wRhCgCAslYTRZZ5aleb1HUtAWzuiMAd/JlAYb2jc6VJa19+rQ/5wd7Bann3hB5bY2uV+b02qFK3qWe6uqZLBSESWjFEyWJVoaXgpH0QDc0JG1R9UFxLSVa8NM3P/jgTnUgnAvdW36mTC4dg73Ha9bYdeLpuAZr6k8iTh5qp0CazKAazsdJ+ECNt6q3fkgsI/V5ZwCFfQwlauq+PiBaZsQqr7eeP19klipE3vGf6BCBRvCBdubzSgGZPIRkxOQREc24j/gzBjBNGeqoU2uOp8sXMEG5idfIyHbMU+n2Yi6IRKKPe5lwjgWEsqLkkoM2REMPIEPffRQz0f+mD48HWP0H4KjKHvubKRCE0v2ONlM4=

Dockerfile Normal file (24 lines changed)

@@ -0,0 +1,24 @@
FROM golang:alpine as builder
RUN apk --update --no-cache add make git g++
# Build statically linked vDB binary (wonky path because of Dep)
RUN mkdir -p /go/src/github.com/vulcanize/vulcanizedb
ADD . /go/src/github.com/vulcanize/vulcanizedb
WORKDIR /go/src/github.com/vulcanize/vulcanizedb
RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -ldflags '-extldflags "-static"' .
# Build migration tool
RUN go get -u -d github.com/pressly/goose/cmd/goose
WORKDIR /go/src/github.com/pressly/goose/cmd/goose
RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -ldflags '-extldflags "-static"' -tags='no_mysql no_sqlite' -o goose
# Second stage
FROM alpine
COPY --from=builder /go/src/github.com/vulcanize/vulcanizedb/vulcanizedb /app/vulcanizedb
COPY --from=builder /go/src/github.com/vulcanize/vulcanizedb/environments/staging.toml /app/environments/
COPY --from=builder /go/src/github.com/vulcanize/vulcanizedb/dockerfiles/startup_script.sh /app/
COPY --from=builder /go/src/github.com/vulcanize/vulcanizedb/db/migrations/* /app/
COPY --from=builder /go/src/github.com/pressly/goose/cmd/goose/goose /app/goose
WORKDIR /app
CMD ["./startup_script.sh"]

Gopkg.lock generated (265 lines changed)

@@ -2,20 +2,39 @@
[[projects]]
branch = "master"
digest = "1:c5e006ec5f6460f05964be0a9d34a42e3ad25bbc2564a289d7b4f6f2290c76e5"
name = "github.com/aristanetworks/goarista"
packages = ["monotime"]
digest = "1:48a213e9dc4880bbbd6999309a476fa4d3cc67560aa7127154cf8ea95bd464c2"
name = "github.com/allegro/bigcache"
packages = [
".",
"queue",
]
pruneopts = ""
revision = "8d0e8f607a4080e7df3532e645440ed0900c64a4"
revision = "f31987a23e44c5121ef8c8b2f2ea2e8ffa37b068"
version = "v1.1.0"
[[projects]]
branch = "master"
digest = "1:735be9b1e9daa0f731c20fbd2f0d4d0d3efbebe674f415f78b72c19defc07fa1"
digest = "1:a313376bcbcce8ae8bddb8089a7293e0473a0f8e9e3710d6244e09e81875ccf0"
name = "github.com/aristanetworks/goarista"
packages = ["monotime"]
pruneopts = ""
revision = "ff33da284e760fcdb03c33d37a719e5ed30ba844"
[[projects]]
branch = "master"
digest = "1:c6bf1ac7bbc0fe51637bf54d5a88ff79b171b3b42dbc665dec98303c862d8662"
name = "github.com/btcsuite/btcd"
packages = ["btcec"]
pruneopts = ""
revision = "2e60448ffcc6bf78332d1fe590260095f554dd78"
revision = "cff30e1d23fc9e800b2b5b4b41ef1817dda07e9f"
[[projects]]
digest = "1:5d47691333460db6ac83ced03c79b4bdb9aff3e322be24affb7855bed8affc6c"
name = "github.com/dave/jennifer"
packages = ["jen"]
pruneopts = ""
revision = "14e399b6b5e8456c66c45c955fc27b568bacb5c9"
version = "v1.3.0"
[[projects]]
digest = "1:aaeffbff5bd24654cb4c190ed75d6c7b57b4f5d6741914c1a7a6bb7447e756c5"
@@ -26,7 +45,7 @@
version = "v1.7.1"
[[projects]]
digest = "1:c205f1963071408c1fac73c1b37c86ef9b98d80f17e690a2239853cde255ad3d"
digest = "1:90d36f5b581e95e00ced808cd48824ed6c320c25887828cce461bdef4cb7bc7c"
name = "github.com/ethereum/go-ethereum"
packages = [
".",
@@ -40,22 +59,15 @@
"common/math",
"common/mclock",
"common/prque",
"consensus",
"consensus/misc",
"core",
"core/rawdb",
"core/state",
"core/types",
"core/vm",
"crypto",
"crypto/bn256",
"crypto/bn256/cloudflare",
"crypto/bn256/google",
"crypto/ecies",
"crypto/secp256k1",
"crypto/sha3",
"ethclient",
"ethdb",
"ethdb/leveldb",
"ethdb/memorydb",
"event",
"log",
"metrics",
@@ -72,8 +84,7 @@
"trie",
]
pruneopts = ""
revision = "58632d44021bf095b43a1bb2443e6e3690a94739"
version = "v1.8.18"
revision = "cd79bc61a983d6482579d12cdd239b37bbfa12ef"
[[projects]]
digest = "1:eb53021a8aa3f599d29c7102e65026242bdedce998a54837dc67f14b6a97c5fd"
@@ -84,36 +95,36 @@
version = "v1.4.7"
[[projects]]
digest = "1:9ca737b471693542351e112c9e86be9bf7385e42256893a09ecb2a98e2036f74"
digest = "1:a01080d20c45c031c13f3828c56e58f4f51d926a482ad10cc0316225097eb7ea"
name = "github.com/go-stack/stack"
packages = ["."]
pruneopts = ""
revision = "259ab82a6cad3992b4e21ff5cac294ccb06474bc"
version = "v1.7.0"
revision = "2fee6af1a9795aafbe0253a0cfbdf668e1fb8a9a"
version = "v1.8.0"
[[projects]]
branch = "master"
digest = "1:3b760d3b93f994df8eb1d9ebfad17d3e9e37edcb7f7efaa15b427c0d7a64f4e4"
digest = "1:3dd078fda7500c341bc26cfbc6c6a34614f295a2457149fc1045cab767cbcf18"
name = "github.com/golang/protobuf"
packages = ["proto"]
pruneopts = ""
revision = "1e59b77b52bf8e4b449a57e6f79f21226d571845"
revision = "aa810b61a9c79d51363740d207bb46cf8e620ed5"
version = "v1.2.0"
[[projects]]
branch = "master"
digest = "1:09307dfb1aa3f49a2bf869dcfa4c6c06ecd3c207221bd1c1a1141f0e51f209eb"
digest = "1:2a5888946cdbc8aa360fd43301f9fc7869d663f60d5eedae7d4e6e5e4f06f2bf"
name = "github.com/golang/snappy"
packages = ["."]
pruneopts = ""
revision = "553a641470496b2327abcac10b36396bd98e45c9"
revision = "2e65f85255dbc3072edf28d6b5b8efc472979f5a"
[[projects]]
digest = "1:a25a2c5ae694b01713fb6cd03c3b1ac1ccc1902b9f0a922680a88ec254f968e1"
digest = "1:5247b135b5492aa232a731acdcb52b08f32b874cb398f21ab460396eadbe866b"
name = "github.com/google/uuid"
packages = ["."]
pruneopts = ""
revision = "9b3b1e0f5f99ae461456d768e7d301a7acdaa2d8"
version = "v1.1.0"
revision = "d460ce9f8df2e77fb1ba55ca87fafed96c607494"
version = "v1.0.0"
[[projects]]
branch = "master"
@@ -127,13 +138,13 @@
revision = "0fb14efe8c47ae851c0034ed7a448854d3d34cf3"
[[projects]]
branch = "master"
digest = "1:147d671753effde6d3bcd58fc74c1d67d740196c84c280c762a5417319499972"
digest = "1:d14365c51dd1d34d5c79833ec91413bfbb166be978724f15701e17080dc06dec"
name = "github.com/hashicorp/hcl"
packages = [
".",
"hcl/ast",
"hcl/parser",
"hcl/printer",
"hcl/scanner",
"hcl/strconv",
"hcl/token",
@@ -142,11 +153,26 @@
"json/token",
]
pruneopts = ""
revision = "23c074d0eceb2b8a5bfdbb271ab780cde70f05a8"
revision = "8cb6e5b959231cc1119e43259c4a608f9c51a241"
version = "v1.0.0"
[[projects]]
digest = "1:b3c5b95e56c06f5aa72cb2500e6ee5f44fcd122872d4fec2023a488e561218bc"
name = "github.com/hpcloud/tail"
packages = [
".",
"ratelimiter",
"util",
"watch",
"winfile",
]
pruneopts = ""
revision = "a30252cb686a21eb2d0b98132633053ec2f7f1e5"
version = "v1.0.0"
[[projects]]
branch = "master"
digest = "1:b5ef0f034b2eda02c19c2cfcccc9754b372c4e04fa593972fbd8e3d9813d8d98"
digest = "1:b6e4cc26365c004808649862e22069de09594a9222143399a7a04904e9f7018c"
name = "github.com/huin/goupnp"
packages = [
".",
@@ -158,7 +184,7 @@
"ssdp",
]
pruneopts = ""
revision = "dceda08e705b2acee36aab47d765ed801f64cfc7"
revision = "1395d1447324cbea88d249fbfcfd70ea878fdfca"
[[projects]]
digest = "1:870d441fe217b8e689d7949fef6e43efbc787e50f200cb1e70dbca9204a1d6be"
@@ -178,52 +204,60 @@
[[projects]]
branch = "master"
digest = "1:fff68ae49ca708829c24a986fb0a044ee2b5aa44b65a0b57b1840fbb0a81f9a6"
digest = "1:617ee2434b77e911fa26b678730be9a617f75243b194eadc8201c8ac860844aa"
name = "github.com/jmoiron/sqlx"
packages = [
".",
"reflectx",
]
pruneopts = ""
revision = "99f3ad6d85ae53d0fecf788ab62d0e9734b3c117"
revision = "0dae4fefe7c0e190f7b5a78dac28a1c82cc8d849"
[[projects]]
digest = "1:6a874e3ddfb9db2b42bd8c85b6875407c702fa868eed20634ff489bc896ccfd3"
name = "github.com/konsorten/go-windows-terminal-sequences"
packages = ["."]
pruneopts = ""
revision = "5c8c8bd35d3832f5d134ae1e1e375b69a4d25242"
version = "v1.0.1"
[[projects]]
branch = "master"
digest = "1:3b5167b89f2203a949b71e29783418a0532531238ad36eb8610ec12e9c7b997f"
digest = "1:29145d7af4adafd72a79df5e41456ac9e232d5a28c1cd4dacf3ff008a217fc10"
name = "github.com/lib/pq"
packages = [
".",
"oid",
]
pruneopts = ""
revision = "83612a56d3dd153a94a629cd64925371c9adad78"
revision = "4ded0e9383f75c197b3a2aaa6d590ac52df6fd79"
[[projects]]
digest = "1:739b2038a38cebb50e922d18f4b042c042256320fea2db094814aeef8891e0c1"
digest = "1:961dc3b1d11f969370533390fdf203813162980c858e1dabe827b60940c909a5"
name = "github.com/magiconair/properties"
packages = ["."]
pruneopts = ""
revision = "d419a98cdbed11a922bf76f257b7c4be79b50e73"
version = "v1.7.4"
revision = "c2353362d570a7bfa228149c62842019201cfb71"
version = "v1.8.0"
[[projects]]
branch = "master"
digest = "1:59d11e81d6fdd12a771321696bb22abdd9a94d26ac864787e98c9b419e428734"
digest = "1:096a8a9182648da3d00ff243b88407838902b6703fc12657f76890e08d1899bf"
name = "github.com/mitchellh/go-homedir"
packages = ["."]
pruneopts = ""
revision = "b8bc1bf767474819792c23f32d8286a45736f1c6"
revision = "ae18d6b8b3205b561c79e8e5f69bff09736185f4"
version = "v1.0.0"
[[projects]]
branch = "master"
digest = "1:a31c8ff06bf8ccc1826e3a28a4132bf217a53f3b096e6a03dcac7b6a9eb796f9"
digest = "1:5219b4506253ccc598f9340677162a42d6a78f340a4cc6df2d62db4d0593c4e9"
name = "github.com/mitchellh/mapstructure"
packages = ["."]
pruneopts = ""
revision = "b4575eea38cca1123ec2dc90c26529b5c5acfcff"
revision = "fa473d140ef3c6adf42d6b391fe76707f1f243c8"
version = "v1.0.0"
[[projects]]
digest = "1:32b27072cd55bd2fb7244de0425943d125da6a552ae2b6517cdd965a662baf18"
digest = "1:a7fd918fb5bd2188436785c0424f8a50b4addfedf37a2b14d796be2a927b8007"
name = "github.com/onsi/ginkgo"
packages = [
".",
@@ -246,11 +280,11 @@
"types",
]
pruneopts = ""
revision = "9eda700730cba42af70d53180f9dcce9266bc2bc"
version = "v1.4.0"
revision = "3774a09d95489ccaa16032e0770d08ea77ba6184"
version = "v1.6.0"
[[projects]]
digest = "1:a4e59d0b2821c983b58c317f141cd77df20570979632da8a7a352e5d12698de7"
digest = "1:3ecd0a37c4a90c12a97e31c398cdbc173824351aa891898ee178120bfe71c478"
name = "github.com/onsi/gomega"
packages = [
".",
@@ -268,8 +302,8 @@
"types",
]
pruneopts = ""
revision = "c893efa28eb45626cdaa76c9f653b62488858837"
version = "v1.2.0"
revision = "7615b9433f86a8bdf29709bf288bc4fd0636a369"
version = "v1.4.2"
[[projects]]
digest = "1:a5484d4fa43127138ae6e7b2299a6a52ae006c7f803d98d717f60abf3e97192e"
@@ -280,83 +314,91 @@
version = "v1.2"
[[projects]]
digest = "1:d60cfeee185019d4fcd35e8c89c83aff576e4723b6100300bf67b05be961388f"
digest = "1:894aef961c056b6d85d12bac890bf60c44e99b46292888bfa66caf529f804457"
name = "github.com/pelletier/go-toml"
packages = ["."]
pruneopts = ""
revision = "acdc4509485b587f5e675510c4f2c63e90ff68a8"
version = "v1.1.0"
revision = "c01d1270ff3e442a8a57cddc1c92dc1138598194"
version = "v1.2.0"
[[projects]]
digest = "1:6938b6ee0351393c55baa54b64352e49a741a1b7b616a7d134d237106812070f"
digest = "1:7143292549152d009ca9e9c493b74736a2ebd93f921bea8a4b308d7cc5edc6b3"
name = "github.com/rjeczalik/notify"
packages = ["."]
pruneopts = ""
revision = "69d839f37b13a8cb7a78366f7633a4071cb43be7"
version = "v0.9.2"
revision = "0f065fa99b48b842c3fd3e2c8b194c6f2b69f6b8"
version = "v0.9.1"
[[projects]]
digest = "1:bfc8db90e2676a2fc0d742a536f376044a9b74f2745b2c60d339eb06c6c6988a"
digest = "1:78c9cf43ddeacd0e472f412082227a0fac2ae107ee60e9112156f9371f9912cf"
name = "github.com/rs/cors"
packages = ["."]
pruneopts = ""
revision = "7af7a1e09ba336d2ea14b1ce73bf693c6837dbf6"
version = "v1.2"
revision = "3fb1b69b103a84de38a19c3c6ec073dd6caa4d3f"
version = "v1.5.0"
[[projects]]
digest = "1:dae0d7dd55563fd389e7263a32d2030022ef29cceff941336e53f6520e0308c0"
digest = "1:9d57e200ef5ccc4217fe0a34287308bac652435e7c6513f6263e0493d2245c56"
name = "github.com/sirupsen/logrus"
packages = ["."]
pruneopts = ""
revision = "bcd833dfe83d3cebad139e4a29ed79cb2318bf95"
version = "v1.2.0"
[[projects]]
digest = "1:d0431c2fd72e39ee43ea7742322abbc200c3e704c9102c5c3c2e2e667095b0ca"
name = "github.com/spf13/afero"
packages = [
".",
"mem",
]
pruneopts = ""
revision = "bb8f1927f2a9d3ab41c9340aa034f6b803f4359c"
version = "v1.0.2"
revision = "d40851caa0d747393da1ffb28f7f9d8b4eeffebd"
version = "v1.1.2"
[[projects]]
digest = "1:6ff9b74bfea2625f805edec59395dc37e4a06458dd3c14e3372337e3d35a2ed6"
digest = "1:d0b38ba6da419a6d4380700218eeec8623841d44a856bb57369c172fbf692ab4"
name = "github.com/spf13/cast"
packages = ["."]
pruneopts = ""
revision = "acbeb36b902d72a7a4c18e8f3241075e7ab763e4"
version = "v1.1.0"
revision = "8965335b8c7107321228e3e3702cab9832751bac"
version = "v1.2.0"
[[projects]]
digest = "1:2208a80fc3259291e43b30f42f844d18f4218036dff510f42c653ec9890d460a"
digest = "1:a1403cc8a94b8d7956ee5e9694badef0e7b051af289caad1cf668331e3ffa4f6"
name = "github.com/spf13/cobra"
packages = ["."]
pruneopts = ""
revision = "7b2c5ac9fc04fc5efafb60700713d4fa609b777b"
version = "v0.0.1"
revision = "ef82de70bb3f60c65fb8eebacbb2d122ef517385"
version = "v0.0.3"
[[projects]]
branch = "master"
digest = "1:104517520aab91164020ab6524a5d6b7cafc641b2e42ac6236f6ac1deac4f66a"
digest = "1:9ceffa4ab5f7195ecf18b3a7fff90c837a9ed5e22e66d18069e4bccfe1f52aa0"
name = "github.com/spf13/jwalterweatherman"
packages = ["."]
pruneopts = ""
revision = "7c0cea34c8ece3fbeb2b27ab9b59511d360fb394"
revision = "4a4406e478ca629068e7768fc33f3f044173c0a6"
version = "v1.0.0"
[[projects]]
digest = "1:261bc565833ef4f02121450d74eb88d5ae4bd74bfe5d0e862cddb8550ec35000"
digest = "1:0a52bcb568386d98f4894575d53ce3e456f56471de6897bb8b9de13c33d9340e"
name = "github.com/spf13/pflag"
packages = ["."]
pruneopts = ""
revision = "e57e3eeb33f795204c1ca35f56c44f83227c6e66"
version = "v1.0.0"
revision = "9a97c102cda95a86cec2345a6f09f55a939babf5"
version = "v1.0.2"
[[projects]]
digest = "1:59354ad53dfe6ed1b941844cb029cd37c0377598eec3a0d49c03aee2375ef9c4"
digest = "1:ac25ea6cc1156aca9611411274b4a0bdd83a623845df6985aab508253955cc66"
name = "github.com/spf13/viper"
packages = ["."]
pruneopts = ""
revision = "25b30aa063fc18e48662b86996252eabdcf2f0c7"
version = "v1.0.0"
revision = "8fb642006536c8d3760c99d4fa2389f5e2205631"
version = "v1.2.0"
[[projects]]
branch = "master"
digest = "1:f87eb23fc0c2b143947616f754533344626637d3ae1b03ba077136ccb17de3f2"
digest = "1:ce5194e5afac308cc34e500cab45b4ce88a0742d689e3cf7e37b607ad76bed2f"
name = "github.com/syndtr/goleveldb"
packages = [
"leveldb",
@@ -373,23 +415,24 @@
"leveldb/util",
]
pruneopts = ""
revision = "adf24ef3f94bd13ec4163060b21a5678f22b429b"
revision = "ae2bd5eed72d46b28834ec3f60db3a3ebedd8dbd"
[[projects]]
branch = "master"
digest = "1:450f85e389d9cbd8265299827a2a062b1b6aee8d4c7b78d849d913a6eb405908"
digest = "1:59b49c47c11a48f1054529207f65907c014ecf5f9a7c0d9c0f1616dec7b062ed"
name = "golang.org/x/crypto"
packages = [
"pbkdf2",
"ripemd160",
"scrypt",
"sha3",
"ssh/terminal",
]
pruneopts = ""
revision = "613d6eafa307c6881a737a3c35c0e312e8d3a8c5"
revision = "ff983b9c42bc9fbf91556e191cc8efb585c16908"
[[projects]]
branch = "master"
digest = "1:950b672f2ee80d0fc4c95a15a976ba9ee573a6fb8ede8a777770b2230776367e"
digest = "1:fbdbb6cf8db3278412c9425ad78b26bb8eb788181f26a3ffb3e4f216b314f86a"
name = "golang.org/x/net"
packages = [
"context",
@@ -399,7 +442,7 @@
"websocket",
]
pruneopts = ""
revision = "faacc1b5e36e3ff02cbec9661c69ac63dd5a83ad"
revision = "26e67e76b6c3f6ce91f7c52def5af501b4e0f3a2"
[[projects]]
branch = "master"
@@ -411,18 +454,17 @@
[[projects]]
branch = "master"
digest = "1:303c0ee48d6229a2423950f41b3ccb5a2067dc4c7b65f8863cfbd962bef05a85"
digest = "1:70d519d5cddeb60ceda2db88c24c340b1b2d7efb25ab54bacb38f57ea1998df7"
name = "golang.org/x/sys"
packages = [
"cpu",
"unix",
"windows",
]
pruneopts = ""
revision = "62eef0e2fa9b2c385f7b2778e763486da6880d37"
revision = "d641721ec2dead6fe5ca284096fe4b1fcd49e427"
[[projects]]
branch = "master"
digest = "1:1c70f7bb89783a026dc32920575a3feef48e065ef6e170ad227903e8194d7a36"
digest = "1:5acd3512b047305d49e8763eef7ba423901e85d5dd2fd1e71778a0ea8de10bd4"
name = "golang.org/x/text"
packages = [
"encoding",
@@ -447,7 +489,17 @@
"unicode/norm",
]
pruneopts = ""
revision = "be25de41fadfae372d6470bda81ca6beb55ef551"
revision = "f21a4dfb5e38f5895301dc265a8def02365cc3d0"
version = "v0.3.0"
[[projects]]
digest = "1:eb53021a8aa3f599d29c7102e65026242bdedce998a54837dc67f14b6a97c5fd"
name = "gopkg.in/fsnotify.v1"
packages = ["."]
pruneopts = ""
revision = "c2828203cd70a50dcccfb2761f8b1f8ceef9a8e9"
source = "gopkg.in/fsnotify/fsnotify.v1"
version = "v1.4.7"
[[projects]]
branch = "v2"
@@ -458,42 +510,55 @@
revision = "c1b8fa8bdccecb0b8db834ee0b92fdbcfa606dd6"
[[projects]]
branch = "v2"
digest = "1:f769ed60e075e4221612c2f4162fccc9d3795ef358fa463425e3b3d7a5debb27"
branch = "v1"
digest = "1:a96d16bd088460f2e0685d46c39bcf1208ba46e0a977be2df49864ec7da447dd"
name = "gopkg.in/tomb.v1"
packages = ["."]
pruneopts = ""
revision = "dd632973f1e7218eb1089048e0798ec9ae7dceb8"
[[projects]]
digest = "1:f0620375dd1f6251d9973b5f2596228cc8042e887cd7f827e4220bc1ce8c30e2"
name = "gopkg.in/yaml.v2"
packages = ["."]
pruneopts = ""
revision = "287cf08546ab5e7e37d55a84f7ed3fd1db036de5"
revision = "5420a8b6744d3b0345ab293f6fcba19c978f1183"
version = "v2.2.1"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
input-imports = [
"github.com/dave/jennifer/jen",
"github.com/ethereum/go-ethereum",
"github.com/ethereum/go-ethereum/accounts/abi",
"github.com/ethereum/go-ethereum/accounts/abi/bind",
"github.com/ethereum/go-ethereum/common",
"github.com/ethereum/go-ethereum/common/hexutil",
"github.com/ethereum/go-ethereum/core",
"github.com/ethereum/go-ethereum/core/rawdb",
"github.com/ethereum/go-ethereum/core/types",
"github.com/ethereum/go-ethereum/crypto",
"github.com/ethereum/go-ethereum/ethclient",
"github.com/ethereum/go-ethereum/ethdb",
"github.com/ethereum/go-ethereum/p2p",
"github.com/ethereum/go-ethereum/p2p/discover",
"github.com/ethereum/go-ethereum/p2p/discv5",
"github.com/ethereum/go-ethereum/params",
"github.com/ethereum/go-ethereum/rlp",
"github.com/ethereum/go-ethereum/rpc",
"github.com/ethereum/go-ethereum/core/types",
"github.com/hashicorp/golang-lru",
"github.com/hpcloud/tail",
"github.com/jmoiron/sqlx",
"github.com/lib/pq",
"github.com/mitchellh/go-homedir",
"github.com/onsi/ginkgo",
"github.com/onsi/gomega",
"github.com/onsi/gomega/ghttp",
"github.com/sirupsen/logrus",
"github.com/spf13/cobra",
"github.com/spf13/viper",
"golang.org/x/net/context",
"golang.org/x/sync/errgroup",
"gopkg.in/tomb.v1",
]
solver-name = "gps-cdcl"
solver-version = 1


@@ -21,6 +21,14 @@
# version = "2.4.0"
[[override]]
name = "gopkg.in/fsnotify.v1"
source = "gopkg.in/fsnotify/fsnotify.v1"
[[override]]
name = "github.com/pressly/sup"
version = "0.5.3"
[[constraint]]
name = "github.com/onsi/ginkgo"
version = "1.4.0"
@@ -33,10 +41,14 @@
branch = "master"
name = "github.com/lib/pq"
[[constraint]]
name = "github.com/sirupsen/logrus"
version = "1.2.0"
[[constraint]]
name = "github.com/spf13/cobra"
version = "0.0.1"
[[constraint]]
name = "github.com/ethereum/go-ethereum"
version = "1.8.18"
revision = "cd79bc61a983d6482579d12cdd239b37bbfa12ef"


@@ -2,31 +2,37 @@ BIN = $(GOPATH)/bin
BASE = $(GOPATH)/src/$(PACKAGE)
PKGS = go list ./... | grep -v "^vendor/"
#Tools
# Tools
## Dependency management
DEP = $(BIN)/dep
$(BIN)/dep:
go get -u github.com/golang/dep/cmd/dep
## Testing library
GINKGO = $(BIN)/ginkgo
$(BIN)/ginkgo:
go get -u github.com/onsi/ginkgo/ginkgo
MIGRATE = $(BIN)/migrate
$(BIN)/migrate:
go get -u -d github.com/mattes/migrate/cli github.com/lib/pq
go build -tags 'postgres' -o $(BIN)/migrate github.com/mattes/migrate/cli
## Migration tool
GOOSE = $(BIN)/goose
$(BIN)/goose:
go get -u -d github.com/pressly/goose/cmd/goose
go build -tags='no_mysql no_sqlite' -o $(BIN)/goose github.com/pressly/goose/cmd/goose
## Source linter
LINT = $(BIN)/golint
$(BIN)/golint:
go get -u golang.org/x/lint/golint
## Combination linter
METALINT = $(BIN)/gometalinter.v2
$(BIN)/gometalinter.v2:
go get -u gopkg.in/alecthomas/gometalinter.v2
$(METALINT) --install
.PHONY: installtools
installtools: | $(LINT) $(MIGRATE) $(GINKGO) $(DEP)
installtools: | $(LINT) $(GOOSE) $(GINKGO) $(DEP)
echo "Installing tools"
.PHONY: metalint
@@ -42,10 +48,15 @@ lint:
.PHONY: test
test: | $(GINKGO) $(LINT)
go get -t ./...
go vet ./...
go fmt ./...
$(GINKGO) -r
$(GINKGO) -r --skipPackage=integration_tests,integration
.PHONY: integrationtest
integrationtest: | $(GINKGO) $(LINT)
go vet ./...
go fmt ./...
$(GINKGO) -r integration_test/
.PHONY: dep
dep: | $(DEP)
@@ -61,30 +72,74 @@ PORT = 5432
NAME =
CONNECT_STRING=postgresql://$(HOST_NAME):$(PORT)/$(NAME)?sslmode=disable
# Parameter checks
## Check that DB variables are provided
.PHONY: checkdbvars
checkdbvars:
test -n "$(HOST_NAME)" # $$HOST_NAME
test -n "$(HOST_NAME)" # $$HOST_NAME
test -n "$(PORT)" # $$PORT
test -n "$(NAME)" # $$NAME
@echo $(CONNECT_STRING)
## Check that the migration variable (id/timestamp) is provided
.PHONY: checkmigration
checkmigration:
test -n "$(MIGRATION)" # $$MIGRATION
# Check that the migration name is provided
.PHONY: checkmigname
checkmigname:
test -n "$(NAME)" # $$NAME
# Migration operations
## Rollback the last migration
.PHONY: rollback
rollback: $(MIGRATE) checkdbvars
$(MIGRATE) -database $(CONNECT_STRING) -path ./db/migrations down 1
rollback: $(GOOSE) checkdbvars
cd db/migrations;\
$(GOOSE) postgres "$(CONNECT_STRING)" down
pg_dump -O -s $(CONNECT_STRING) > db/schema.sql
## Rollback to a selected migration (id/timestamp)
.PHONY: rollback_to
rollback_to: $(GOOSE) checkmigration checkdbvars
cd db/migrations;\
$(GOOSE) postgres "$(CONNECT_STRING)" down-to "$(MIGRATION)"
## Apply all migrations not already run
.PHONY: migrate
migrate: $(MIGRATE) checkdbvars
$(MIGRATE) -database $(CONNECT_STRING) -path ./db/migrations up
migrate: $(GOOSE) checkdbvars
cd db/migrations;\
$(GOOSE) postgres "$(CONNECT_STRING)" up
pg_dump -O -s $(CONNECT_STRING) > db/schema.sql
## Create a new migration file
.PHONY: new_migration
new_migration: $(GOOSE) checkmigname
cd db/migrations;\
$(GOOSE) create $(NAME) sql
## Check which migrations are applied at the moment
.PHONY: migration_status
migration_status: $(GOOSE) checkdbvars
cd db/migrations;\
$(GOOSE) postgres "$(CONNECT_STRING)" status
# Convert timestamped migrations to versioned (to be run in CI);
# merge timestamped files to prevent conflict
.PHONY: version_migrations
version_migrations:
cd db/migrations; $(GOOSE) fix
# Import a psql schema to the database
.PHONY: import
import:
test -n "$(NAME)" # $$NAME
psql $(NAME) < db/schema.sql
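## Example: make import NAME=vulcanize_public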
#Rinkeby docker environment
# Docker actions
## Rinkeby docker environment
RINKEBY_COMPOSE_FILE=dockerfiles/rinkeby/docker-compose.yml
.PHONY: rinkeby_env_up

README.md (181 lines changed)

@@ -1,61 +1,76 @@
# Vulcanize DB
[![Join the chat at https://gitter.im/vulcanizeio/VulcanizeDB](https://badges.gitter.im/vulcanizeio/VulcanizeDB.svg)](https://gitter.im/vulcanizeio/VulcanizeDB?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[![Build Status](https://travis-ci.org/vulcanize/vulcanizedb.svg?branch=master)](https://travis-ci.org/vulcanize/vulcanizedb)
[![Go Report Card](https://goreportcard.com/badge/github.com/vulcanize/vulcanizedb)](https://goreportcard.com/report/github.com/vulcanize/vulcanizedb)
## About
> Vulcanize DB is a set of tools that make it easier for developers to write application-specific indexes and caches for dapps built on Ethereum.
## Table of Contents
1. [Background](../staging/README.md#background)
1. [Dependencies](../staging/README.md#dependencies)
1. [Install](../staging/README.md#install)
1. [Usage](../staging/README.md#usage)
1. [Tests](../staging/README.md#tests)
1. [API](../staging/README.md#API)
1. [Contributing](../staging/README.md#contributing)
1. [License](../staging/README.md#license)
## Background
The same data structures and encodings that make Ethereum an effective and trust-less distributed virtual machine
complicate data accessibility and usability for dApp developers.
Vulcanize DB is a set of tools that make it easier for developers to write application-specific indexes and caches for dapps built on Ethereum.
## Dependencies
- Go 1.11+
- Postgres 10
- Postgres 10.6
- Ethereum Node
- [Go Ethereum](https://ethereum.github.io/go-ethereum/downloads/) (1.8.18+)
- [Go Ethereum](https://ethereum.github.io/go-ethereum/downloads/) (1.8.23+)
- [Parity 1.8.11+](https://github.com/paritytech/parity/releases)
## Project Setup
Using Vulcanize for the first time requires several setup steps. The following instructions walk through each step of the process:
## Install
1. [Building the project](../staging/README.md#building-the-project)
1. [Setting up the database](../staging/README.md#setting-up-the-database)
1. [Configuring a synced Ethereum node](../staging/README.md#configuring-a-synced-ethereum-node)
1. Fetching the project
2. Installing dependencies
3. Configuring shell environment
4. Database setup
5. Configuring synced Ethereum node integration
6. Data syncing
## Installation
In order to fetch the project codebase for local use or modification, install it to your `GOPATH` via:
### Building the project
Download the codebase to your local `GOPATH` via:
`go get github.com/vulcanize/vulcanizedb`
Once fetched, dependencies can be installed via `go get` or (the preferred method) at specific versions via `golang/dep`, the prototype golang package manager. Installation instructions are [here](https://golang.github.io/dep/docs/installation.html).
Move to the project directory and use [golang/dep](https://github.com/golang/dep) to install the dependencies:
To install packages with `dep`, ensure you are in the project directory within your `GOPATH` (default location: `~/go/src/github.com/vulcanize/vulcanizedb/`) and run:
`cd $GOPATH/src/github.com/vulcanize/vulcanizedb`
`dep ensure`
After `dep` finishes, dependencies should be installed within your `GOPATH` at the versions specified in `Gopkg.toml`.
Once the dependencies have been successfully installed, build the executable with:
Lastly, ensure that `GOPATH` is defined in your shell. If necessary, `GOPATH` can be set in `~/.bashrc` or `~/.bash_profile`, depending upon your system. It can be additionally helpful to add `$GOPATH/bin` to your shell's `$PATH`.
`make build`
## Setting up the Database
If you are running into issues at this stage, ensure that `GOPATH` is defined in your shell.
If necessary, `GOPATH` can be set in `~/.bashrc` or `~/.bash_profile`, depending upon your system.
It can be additionally helpful to add `$GOPATH/bin` to your shell's `$PATH`.
### Setting up the database
1. Install Postgres
1. Create a superuser for yourself and make sure `psql --list` works without prompting for a password.
1. Execute `createdb vulcanize_public`
1. Execute `cd $GOPATH/src/github.com/vulcanize/vulcanizedb`
1. Run the migrations: `make migrate HOST_NAME=localhost NAME=vulcanize_public PORT=<postgres port, default 5432>`
1. `createdb vulcanize_public`
1. `cd $GOPATH/src/github.com/vulcanize/vulcanizedb`
1. Run the migrations: `make migrate HOST_NAME=localhost NAME=vulcanize_public PORT=5432`
- To rollback a single step: `make rollback NAME=vulcanize_public`
- To rollback to a certain migration: `make rollback_to MIGRATION=n NAME=vulcanize_public`
- To see status of migrations: `make migration_status NAME=vulcanize_public`
* See below for configuring additional environments
In some cases (such as recent Ubuntu systems), it may be necessary to overcome failures of password authentication from localhost. To allow access on Ubuntu, set localhost connections via hostname, ipv4, and ipv6 from peer/md5 to trust in: /etc/postgresql/<version>/pg_hba.conf
In some cases (such as recent Ubuntu systems), it may be necessary to overcome failures of password authentication from `localhost`. To allow access on Ubuntu, set localhost connections via hostname, ipv4, and ipv6 from `peer`/`md5` to `trust` in: `/etc/postgresql/<version>/pg_hba.conf`
(It should be noted that trusted auth should only be enabled on systems without sensitive data in them: development and local test databases)
(It should be noted that trusted auth should only be enabled on systems without sensitive data in them: development and local test databases.)
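As an illustrative sketch (exact entries vary by system and Postgres version), the change amounts to editing the local connection lines in `pg_hba.conf` from `peer`/`md5` to `trust`:

```
# TYPE  DATABASE  USER  ADDRESS       METHOD
local   all       all                 trust
host    all       all   127.0.0.1/32  trust
host    all       all   ::1/128       trust
```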
## Configuring Ethereum Node Integration
### Configuring a synced Ethereum node
- To use a local Ethereum node, copy `environments/public.toml.example` to
`environments/public.toml` and update the `ipcPath` and `levelDbPath`.
- `ipcPath` should match the local node's IPC filepath:
@@ -71,7 +86,7 @@ In some cases (such as recent Ubuntu systems), it may be necessary to overcome f
- The default location is:
- Mac: `<full home path>/Library/Application\ Support/io.parity.ethereum/`
- Linux: `<full home path>/local/share/io.parity.ethereum/`
- `levelDbPath` should match Geth's chaindata directory path.
- The geth LevelDB chaindata path is printed to the console when you start geth.
- The default location is:
@@ -79,95 +94,41 @@ In some cases (such as recent Ubuntu systems), it may be necessary to overcome f
- Linux: `<full home path>/ethereum/geth/chaindata`
- `levelDbPath` is irrelevant (and `coldImport` is currently unavailable) if only running parity.
- See `environments/infura.toml` to configure commands to run against infura, if a local node is unavailable. (Support is currently experimental.)
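Putting the above together, a minimal sketch of the client section in `environments/public.toml` might look like the following (both paths are placeholders for your system, and `levelDbPath` should only matter for `coldImport`):

```toml
[client]
ipcPath = "/Users/user/Library/Ethereum/geth.ipc"
levelDbPath = "/Users/user/Library/Ethereum/geth/chaindata"
```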
## Start syncing with postgres
Syncs VulcanizeDB with the configured Ethereum node.
1. Start the node
- If node state is not yet fully synced, Vulcanize will not be able to operate on the fetched data. You will need to wait for the initial sync to finish.
1. Start the vulcanize_db sync or lightSync
- Execute `./vulcanizedb sync --config <path to config.toml>`
- Or `./vulcanizedb lightSync --config <path to config.toml>`
- Or to sync from a specific block: `./vulcanizedb sync --config <config.toml> --starting-block-number <block-number>`
- Or `./vulcanizedb lightSync --config <config.toml> --starting-block-number <block-number>`
## Usage
Usage is broken up into two processes:
## Alternatively, sync from Geth's underlying LevelDB
Sync VulcanizeDB from the LevelDB underlying a Geth node.
1. Ensure the node is not running and that it has synced to the desired block height.
1. Start vulcanize_db
- `./vulcanizedb coldImport --config <config.toml>`
1. Optional flags:
- `--starting-block-number <block number>`/`-s <block number>`: block number to start syncing from
- `--ending-block-number <block number>`/`-e <block number>`: block number to sync to
- `--all`/`-a`: sync all missing blocks
### Data syncing
To provide data for transformations, raw Ethereum data must first be synced into vDB.
This is accomplished through the use of the `lightSync`, `sync`, or `coldImport` commands.
These commands are described in detail [here](../staging/documentation/sync.md).
## Running the Tests
### Data transformation
Contract watchers use the raw data that has been synced into Postgres to filter out and apply transformations to specific data of interest.
In order to run the full test suite, a test database must be prepared. By default, the tests use a database named `vulcanize_private`. Create the database in Postgres, and run migrations on the new database in preparation for executing tests:
There is a built-in `contractWatcher` command which provides generic transformation of most contract data. This command is described in detail [here](../staging/documentation/contractWatcher.md).
`make migrate HOST_NAME=localhost NAME=vulcanize_private PORT=<postgres port, default 5432>`
In many cases a custom transformer or set of transformers will need to be written to provide complete or more comprehensive coverage or to optimize other aspects of the output for a specific end-use.
In this case we have provided the `compose`, `execute`, and `composeAndExecute` commands for running custom transformers from external repositories. This is described in detail [here](../staging/documentation/composeAndExecute.md).
Ginkgo is declared as a `dep` package for test execution. Linting and tests can be run together via the provided `make` task:
`make test`
## Tests
- Replace the empty `ipcPath` in the `environments/infura.toml` with a path to a full node's eth_jsonrpc endpoint (e.g. local geth node ipc path or infura url)
- Note: integration tests require configuration with an archival node
- `createdb vulcanize_private` will create the test db
- `make migrate NAME=vulcanize_private` will run the db migrations
- `make test` will run the unit tests and skip the integration tests
- `make integrationtest` will run just the integration tests
Tests can be run directly via Ginkgo in the project's root directory:
## API
[Postgraphile](https://www.graphile.org/postgraphile/) is used to expose GraphQL endpoints for our database schemas; this is described in detail [here](../staging/postgraphile/README.md).
`ginkgo -r`
## Start the full environment in docker with a single command
## Contributing
Contributions are welcome! For more on this, please see [here](../staging/documentation/contributing.md).
### Geth Rinkeby
Small note: If editing the Readme, please conform to the [standard-readme specification](https://github.com/RichardLitt/standard-readme).
make command | description
------------------- | ----------------
rinkeby_env_up | start geth, postgres and rolling migrations, after migrations done starting vulcanizedb container
rinkeby_env_deploy | build and run vulcanizedb container in rinkeby environment
rinkeby_env_migrate | build and run rinkeby env migrations
rinkeby_env_down | stop and remove all rinkeby env containers
A successful run of the VulcanizeDB container requires a fully synced geth state;
attach to the geth console and check the sync state:
```bash
$ docker exec -it rinkeby_vulcanizedb_geth geth --rinkeby attach
...
> eth.syncing
false
```
If you already have full Rinkeby chaindata, you can move it to the `rinkeby_vulcanizedb_geth_data` docker volume to skip the long sync.
## omniWatcher and lightOmniWatcher
These commands require a pre-synced (full or light) vulcanizeDB (see the sections above).
To watch all events of a contract using a light synced vDB:
- Execute `./vulcanizedb omniWatcher --config <path to config.toml> --contract-address <contract address>`
Or if you are using a full synced vDB, change the mode to full:
- Execute `./vulcanizedb omniWatcher --mode full --config <path to config.toml> --contract-address <contract address>`
To watch contracts on a network other than mainnet, use the network flag:
- Execute `./vulcanizedb omniWatcher --config <path to config.toml> --contract-address <contract address> --network <ropsten, kovan, or rinkeby>`
To watch events starting at a certain block use the starting block flag:
- Execute `./vulcanizedb omniWatcher --config <path to config.toml> --contract-address <contract address> --starting-block-number <#>`
To watch only specified events use the events flag:
- Execute `./vulcanizedb omniWatcher --config <path to config.toml> --contract-address <contract address> --events <EventName1> --events <EventName2>`
To watch events and poll the specified methods with any addresses and hashes emitted by the watched events, use the methods flag:
- Execute `./vulcanizedb omniWatcher --config <path to config.toml> --contract-address <contract address> --methods <methodName1> --methods <methodName2>`
To watch specified events and poll the specified method with any addresses and hashes emitted by the watched events:
- Execute `./vulcanizedb omniWatcher --config <path to config.toml> --contract-address <contract address> --events <EventName1> --events <EventName2> --methods <methodName>`
To turn on method piping so that values returned from previous method calls are cached and used as arguments in subsequent method calls:
- Execute `./vulcanizedb omniWatcher --config <path to config.toml> --piping true --contract-address <contract address> --events <EventName1> --events <EventName2> --methods <methodName>`
To watch all types of events of the contract but only persist the ones that emit one of the filtered-for argument values:
- Execute `./vulcanizedb omniWatcher --config <path to config.toml> --contract-address <contract address> --event-args <arg1> --event-args <arg2>`
To watch all events of the contract but only poll the specified method with specified argument values (if they are emitted from the watched events):
- Execute `./vulcanizedb omniWatcher --config <path to config.toml> --contract-address <contract address> --methods <methodName> --method-args <arg1> --method-args <arg2>`
## License
[AGPL-3.0](../staging/LICENSE) © Vulcanize Inc


@@ -1,5 +1,5 @@
// VulcanizeDB
// Copyright © 2018 Vulcanize
// Copyright © 2019 Vulcanize
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
@@ -17,8 +17,7 @@
package cmd
import (
"log"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/vulcanize/vulcanizedb/pkg/crypto"

cmd/compose.go Normal file (187 lines changed)

@@ -0,0 +1,187 @@
// Copyright © 2019 Vulcanize, Inc
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package cmd
import (
"errors"
"fmt"
"strconv"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/vulcanize/vulcanizedb/pkg/config"
p2 "github.com/vulcanize/vulcanizedb/pkg/plugin"
)
// composeCmd represents the compose command
var composeCmd = &cobra.Command{
Use: "compose",
Short: "Composes transformer initializer plugin",
Long: `This command needs a config .toml file of form:
[database]
name = "vulcanize_public"
hostname = "localhost"
user = "vulcanize"
password = "vulcanize"
port = 5432
[client]
ipcPath = "/Users/user/Library/Ethereum/geth.ipc"
[exporter]
home = "github.com/vulcanize/vulcanizedb"
name = "exampleTransformerExporter"
save = false
transformerNames = [
"transformer1",
"transformer2",
"transformer3",
"transformer4",
]
[exporter.transformer1]
path = "path/to/transformer1"
type = "eth_event"
repository = "github.com/account/repo"
migrations = "db/migrations"
rank = "0"
[exporter.transformer2]
path = "path/to/transformer2"
type = "eth_contract"
repository = "github.com/account/repo"
migrations = "db/migrations"
rank = "0"
[exporter.transformer3]
path = "path/to/transformer3"
type = "eth_event"
repository = "github.com/account/repo"
migrations = "db/migrations"
rank = "0"
[exporter.transformer4]
path = "path/to/transformer4"
type = "eth_storage"
repository = "github.com/account2/repo2"
migrations = "to/db/migrations"
rank = "1"
Note: If any of the plugin transformers need additional
configuration variables, include them in the .toml file as well.
This information is used to write and build a go plugin with a transformer
set composed from the transformer imports specified in the config file.
This plugin is loaded, and the set of transformer initializers is exported
from it, loaded into, and executed over by the appropriate watcher.
The type of watcher that the transformer works with is specified using the
type variable for each transformer in the config. Currently there are watchers
of event data from an eth node (eth_event) and storage data from an eth node
(eth_storage), and a more generic interface for accepting contract_watcher pkg
based transformers which can perform both event watching and public method
polling (eth_contract).
Transformers of different types can be run together in the same command using a
single config file or in separate command instances using different config files.
Specify config location when executing the command:
./vulcanizedb compose --config=./environments/config_name.toml`,
Run: func(cmd *cobra.Command, args []string) {
compose()
},
}
func compose() {
// Build plugin generator config
prepConfig()
// Generate code to build the plugin according to the config file
log.Info("generating plugin")
generator, err := p2.NewGenerator(genConfig, databaseConfig)
if err != nil {
log.Fatal(err)
}
err = generator.GenerateExporterPlugin()
if err != nil {
log.Debug("generating plugin failed")
log.Fatal(err)
}
// TODO: Embed versioning info in the .so files so we know which version of vulcanizedb to run them with
_, pluginPath, err := genConfig.GetPluginPaths()
if err != nil {
log.Fatal(err)
}
fmt.Printf("Composed plugin %s", pluginPath)
log.Info("plugin .so file output to", pluginPath)
}
func init() {
rootCmd.AddCommand(composeCmd)
}
func prepConfig() {
log.Info("configuring plugin")
names := viper.GetStringSlice("exporter.transformerNames")
transformers := make(map[string]config.Transformer)
for _, name := range names {
transformer := viper.GetStringMapString("exporter." + name)
p, pOK := transformer["path"]
if !pOK || p == "" {
log.Fatal(name, "transformer config is missing `path` value")
}
r, rOK := transformer["repository"]
if !rOK || r == "" {
log.Fatal(name, "transformer config is missing `repository` value")
}
m, mOK := transformer["migrations"]
if !mOK || m == "" {
log.Fatal(name, "transformer config is missing `migrations` value")
}
mr, mrOK := transformer["rank"]
if !mrOK || mr == "" {
log.Fatal(name, "transformer config is missing `rank` value")
}
rank, err := strconv.ParseUint(mr, 10, 64)
if err != nil {
log.Fatal(name, "migration `rank` can't be converted to an unsigned integer")
}
t, tOK := transformer["type"]
if !tOK {
log.Fatal(name, "transformer config is missing `type` value")
}
transformerType := config.GetTransformerType(t)
if transformerType == config.UnknownTransformerType {
log.Fatal(errors.New(`unknown transformer type in exporter config; accepted types are "eth_event", "eth_storage", "eth_contract"`))
}
transformers[name] = config.Transformer{
Path: p,
Type: transformerType,
RepositoryPath: r,
MigrationPath: m,
MigrationRank: rank,
}
}
genConfig = config.Plugin{
Transformers: transformers,
FilePath: "$GOPATH/src/github.com/vulcanize/vulcanizedb/plugins",
FileName: viper.GetString("exporter.name"),
Save: viper.GetBool("exporter.save"),
Home: viper.GetString("exporter.home"),
}
}

cmd/composeAndExecute.go Normal file (191 lines changed)

@@ -0,0 +1,191 @@
// Copyright © 2019 Vulcanize, Inc
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package cmd
import (
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/vulcanize/vulcanizedb/libraries/shared/watcher"
"github.com/vulcanize/vulcanizedb/pkg/fs"
p2 "github.com/vulcanize/vulcanizedb/pkg/plugin"
"github.com/vulcanize/vulcanizedb/pkg/plugin/helpers"
"github.com/vulcanize/vulcanizedb/utils"
"os"
"plugin"
syn "sync"
)
// composeAndExecuteCmd represents the composeAndExecute command
var composeAndExecuteCmd = &cobra.Command{
Use: "composeAndExecute",
Short: "Composes, loads, and executes transformer initializer plugin",
Long: `This command needs a config .toml file of form:
[database]
name = "vulcanize_public"
hostname = "localhost"
user = "vulcanize"
password = "vulcanize"
port = 5432
[client]
ipcPath = "/Users/user/Library/Ethereum/geth.ipc"
[exporter]
home = "github.com/vulcanize/vulcanizedb"
name = "exampleTransformerExporter"
save = false
transformerNames = [
"transformer1",
"transformer2",
"transformer3",
"transformer4",
]
[exporter.transformer1]
path = "path/to/transformer1"
type = "eth_event"
repository = "github.com/account/repo"
migrations = "db/migrations"
rank = "0"
[exporter.transformer2]
path = "path/to/transformer2"
type = "eth_contract"
repository = "github.com/account/repo"
migrations = "db/migrations"
rank = "2"
[exporter.transformer3]
path = "path/to/transformer3"
type = "eth_event"
repository = "github.com/account/repo"
migrations = "db/migrations"
rank = "0"
[exporter.transformer4]
path = "path/to/transformer4"
type = "eth_storage"
repository = "github.com/account2/repo2"
migrations = "to/db/migrations"
rank = "1"
Note: If any of the plugin transformers need additional
configuration variables, include them in the .toml file as well.
This information is used to write and build a go plugin with a transformer
set composed from the transformer imports specified in the config file.
This plugin is loaded, and the set of transformer initializers is exported
from it, loaded into, and executed over by the appropriate watcher.
The type of watcher that the transformer works with is specified using the
type variable for each transformer in the config. Currently there are watchers
of event data from an eth node (eth_event) and storage data from an eth node
(eth_storage), and a more generic interface for accepting contract_watcher pkg
based transformers which can perform both event watching and public method
polling (eth_contract).
Transformers of different types can be run together in the same command using a
single config file or in separate command instances using different config files.
Specify config location when executing the command:
./vulcanizedb composeAndExecute --config=./environments/config_name.toml`,
Run: func(cmd *cobra.Command, args []string) {
composeAndExecute()
},
}
func composeAndExecute() {
// Build plugin generator config
prepConfig()
// Generate code to build the plugin according to the config file
log.Info("generating plugin")
generator, err := p2.NewGenerator(genConfig, databaseConfig)
if err != nil {
log.Fatal(err)
}
err = generator.GenerateExporterPlugin()
if err != nil {
log.Debug("generating plugin failed")
log.Fatal(err)
}
// Get the plugin path and load the plugin
_, pluginPath, err := genConfig.GetPluginPaths()
if err != nil {
log.Fatal(err)
}
if !genConfig.Save {
defer helpers.ClearFiles(pluginPath)
}
log.Info("linking plugin", pluginPath)
plug, err := plugin.Open(pluginPath)
if err != nil {
log.Debug("linking plugin failed")
log.Fatal(err)
}
// Load the `Exporter` symbol from the plugin
log.Info("loading transformers from plugin")
symExporter, err := plug.Lookup("Exporter")
if err != nil {
log.Debug("loading Exporter symbol failed")
log.Fatal(err)
}
// Assert that the symbol is of type Exporter
exporter, ok := symExporter.(Exporter)
if !ok {
log.Debug("plugged-in symbol not of type Exporter")
os.Exit(1)
}
// Use the Exporter's Export method to load the EventTransformerInitializer, StorageTransformerInitializer, and ContractTransformerInitializer sets
ethEventInitializers, ethStorageInitializers, ethContractInitializers := exporter.Export()
// Setup bc and db objects
blockChain := getBlockChain()
db := utils.LoadPostgres(databaseConfig, blockChain.Node())
// Execute over transformer sets returned by the exporter
// Use a WaitGroup to wait on all of the watcher goroutines
var wg syn.WaitGroup
if len(ethEventInitializers) > 0 {
ew := watcher.NewEventWatcher(&db, blockChain)
ew.AddTransformers(ethEventInitializers)
wg.Add(1)
go watchEthEvents(&ew, &wg)
}
if len(ethStorageInitializers) > 0 {
tailer := fs.FileTailer{Path: storageDiffsPath}
sw := watcher.NewStorageWatcher(tailer, &db)
sw.AddTransformers(ethStorageInitializers)
wg.Add(1)
go watchEthStorage(&sw, &wg)
}
if len(ethContractInitializers) > 0 {
gw := watcher.NewContractWatcher(&db, blockChain)
gw.AddTransformers(ethContractInitializers)
wg.Add(1)
go watchEthContract(&gw, &wg)
}
wg.Wait()
}
func init() {
rootCmd.AddCommand(composeAndExecuteCmd)
composeAndExecuteCmd.Flags().BoolVar(&recheckHeadersArg, "recheckHeaders", false, "checks headers that are already checked for each transformer.")
}
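For orientation, below is a minimal sketch of the shape of the plugin that `plugin.Open` loads above, under the assumption that the compose step generates something equivalent (the `exporter` type name and the empty initializer sets are illustrative; the real file is generated from the config's transformer imports):

```go
// Hypothetical sketch of a composed plugin's source; built with
// `go build -buildmode=plugin` to produce the .so file loaded above.
package main

import (
	"github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
)

type exporter string

// Exporter is the symbol retrieved via plug.Lookup("Exporter").
var Exporter exporter

// Export returns the transformer initializer sets that the event, storage,
// and contract watchers execute over.
func (e exporter) Export() (
	[]transformer.EventTransformerInitializer,
	[]transformer.StorageTransformerInitializer,
	[]transformer.ContractTransformerInitializer,
) {
	// A generated plugin would return the composed initializers here.
	return []transformer.EventTransformerInitializer{},
		[]transformer.StorageTransformerInitializer{},
		[]transformer.ContractTransformerInitializer{}
}
```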

cmd/contractWatcher.go Normal file (125 lines changed)

@@ -0,0 +1,125 @@
// VulcanizeDB
// Copyright © 2019 Vulcanize
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package cmd
import (
"fmt"
"github.com/vulcanize/vulcanizedb/pkg/config"
"time"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
st "github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
ft "github.com/vulcanize/vulcanizedb/pkg/contract_watcher/full/transformer"
lt "github.com/vulcanize/vulcanizedb/pkg/contract_watcher/light/transformer"
"github.com/vulcanize/vulcanizedb/utils"
)
// contractWatcherCmd represents the contractWatcher command
var contractWatcherCmd = &cobra.Command{
Use: "contractWatcher",
Short: "Watches events at the provided contract address using fully synced vDB",
Long: `Uses input contract address and event filters to watch events
Expects an ethereum node to be running
Expects an archival node synced into vulcanizeDB
Requires a .toml config file:
[database]
name = "vulcanize_public"
hostname = "localhost"
port = 5432
[client]
ipcPath = "/Users/user/Library/Ethereum/geth.ipc"
[contract]
network = ""
addresses = [
"contractAddress1",
"contractAddress2"
]
[contract.contractAddress1]
abi = 'ABI for contract 1'
startingBlock = 982463
[contract.contractAddress2]
abi = 'ABI for contract 2'
events = [
"event1",
"event2"
]
eventArgs = [
"arg1",
"arg2"
]
methods = [
"method1",
"method2"
]
methodArgs = [
"arg1",
"arg2"
]
startingBlock = 4448566
piping = true
`,
Run: func(cmd *cobra.Command, args []string) {
contractWatcher()
},
}
var (
mode string
)
func contractWatcher() {
ticker := time.NewTicker(5 * time.Second)
defer ticker.Stop()
blockChain := getBlockChain()
db := utils.LoadPostgres(databaseConfig, blockChain.Node())
var t st.ContractTransformer
con := config.ContractConfig{}
con.PrepConfig()
switch mode {
case "light":
t = lt.NewTransformer(con, blockChain, &db)
case "full":
t = ft.NewTransformer(con, blockChain, &db)
default:
log.Fatal("Invalid mode")
}
err := t.Init()
if err != nil {
log.Fatal(fmt.Sprintf("Failed to initialized transformer\r\nerr: %v\r\n", err))
}
for range ticker.C {
err = t.Execute()
if err != nil {
log.Error("Execution error for transformer:", t.GetConfig().Name, err)
}
}
}
func init() {
rootCmd.AddCommand(contractWatcherCmd)
contractWatcherCmd.Flags().StringVarP(&mode, "mode", "o", "light", "'light' or 'full' mode to work with either light synced or fully synced vDB (default is light)")
}


@@ -1,87 +0,0 @@
// VulcanizeDB
// Copyright © 2018 Vulcanize
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package cmd
import (
"log"
"time"
"github.com/spf13/cobra"
"github.com/vulcanize/vulcanizedb/examples/erc20_watcher/event_triggered/dai"
"github.com/vulcanize/vulcanizedb/examples/erc20_watcher/every_block"
"github.com/vulcanize/vulcanizedb/examples/generic"
"github.com/vulcanize/vulcanizedb/libraries/shared"
"github.com/vulcanize/vulcanizedb/pkg/omni/shared/constants"
"github.com/vulcanize/vulcanizedb/utils"
)
// erc20Cmd represents the erc20 command
var erc20Cmd = &cobra.Command{
Use: "erc20",
Short: "Fetches and persists token supply",
Long: `Fetches transfer and approval events, totalSupply, allowances, and
balances for the configured token from each block and persists it in Vulcanize DB.
vulcanizedb erc20 --config environments/public
Expects an ethereum node to be running and requires a .toml config file:
[database]
name = "vulcanize_public"
hostname = "localhost"
port = 5432
[client]
ipcPath = "/Users/user/Library/Ethereum/geth.ipc"
`,
Run: func(cmd *cobra.Command, args []string) {
watchERC20s()
},
}
func watchERC20s() {
ticker := time.NewTicker(5 * time.Second)
defer ticker.Stop()
blockChain := getBlockChain()
db := utils.LoadPostgres(databaseConfig, blockChain.Node())
con := generic.DaiConfig
con.Filters = constants.DaiERC20Filters
watcher := shared.Watcher{
DB: db,
Blockchain: blockChain,
}
// It is important that the event transformer is executed before the every_block transformer
// because the events are used to generate the token holder address list that is used to
// collect balances and allowances at every block
transformers := append(dai.DaiEventTriggeredTransformerInitializer(), every_block.ERC20EveryBlockTransformerInitializer()...)
err := watcher.AddTransformers(transformers, con)
if err != nil {
log.Fatal(err)
}
for range ticker.C {
watcher.Execute()
}
}
func init() {
rootCmd.AddCommand(erc20Cmd)
}

cmd/execute.go Normal file (182 lines changed)

@@ -0,0 +1,182 @@
// Copyright © 2019 Vulcanize, Inc
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package cmd
import (
"fmt"
"os"
"plugin"
syn "sync"
"time"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/vulcanize/vulcanizedb/libraries/shared/constants"
"github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
"github.com/vulcanize/vulcanizedb/libraries/shared/watcher"
"github.com/vulcanize/vulcanizedb/pkg/fs"
"github.com/vulcanize/vulcanizedb/utils"
)
// executeCmd represents the execute command
var executeCmd = &cobra.Command{
Use: "execute",
Short: "executes a precomposed transformer initializer plugin",
Long: `This command needs a config .toml file of form:
[database]
name = "vulcanize_public"
hostname = "localhost"
user = "vulcanize"
password = "vulcanize"
port = 5432
[client]
ipcPath = "/Users/user/Library/Ethereum/geth.ipc"
[exporter]
name = "exampleTransformerExporter"
Note: If any of the plugin transformers need additional
configuration variables, include them in the .toml file as well.
The exporter.name is the name (without extension) of the plugin to be loaded.
The plugin file needs to be located in the /plugins directory and this command assumes
the db migrations remain from when the plugin was composed. Additionally, the plugin
must have been composed by the same version of vulcanizedb or else it will not be compatible.
Specify config location when executing the command:
./vulcanizedb execute --config=./environments/config_name.toml`,
Run: func(cmd *cobra.Command, args []string) {
execute()
},
}
func execute() {
// Build plugin generator config
prepConfig()
// Get the plugin path and load the plugin
_, pluginPath, err := genConfig.GetPluginPaths()
if err != nil {
log.Fatal(err)
}
fmt.Printf("Executing plugin %s", pluginPath)
log.Info("linking plugin", pluginPath)
plug, err := plugin.Open(pluginPath)
if err != nil {
log.Debug("linking plugin failed")
log.Fatal(err)
}
// Load the `Exporter` symbol from the plugin
log.Info("loading transformers from plugin")
symExporter, err := plug.Lookup("Exporter")
if err != nil {
log.Debug("loading Exporter symbol failed")
log.Fatal(err)
}
// Assert that the symbol is of type Exporter
exporter, ok := symExporter.(Exporter)
if !ok {
log.Debug("plugged-in symbol not of type Exporter")
os.Exit(1)
}
// Use the Exporter's Export method to load the EventTransformerInitializer, StorageTransformerInitializer, and ContractTransformerInitializer sets
ethEventInitializers, ethStorageInitializers, ethContractInitializers := exporter.Export()
// Setup bc and db objects
blockChain := getBlockChain()
db := utils.LoadPostgres(databaseConfig, blockChain.Node())
// Execute over transformer sets returned by the exporter
// Use WaitGroup to wait on both goroutines
var wg syn.WaitGroup
if len(ethEventInitializers) > 0 {
ew := watcher.NewEventWatcher(&db, blockChain)
ew.AddTransformers(ethEventInitializers)
wg.Add(1)
go watchEthEvents(&ew, &wg)
}
if len(ethStorageInitializers) > 0 {
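// storageDiffsPath is populated from the [filesystem] storageDiffsPath config value (see root.go)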
tailer := fs.FileTailer{Path: storageDiffsPath}
sw := watcher.NewStorageWatcher(tailer, &db)
sw.AddTransformers(ethStorageInitializers)
wg.Add(1)
go watchEthStorage(&sw, &wg)
}
if len(ethContractInitializers) > 0 {
gw := watcher.NewContractWatcher(&db, blockChain)
gw.AddTransformers(ethContractInitializers)
wg.Add(1)
go watchEthContract(&gw, &wg)
}
wg.Wait()
}
func init() {
rootCmd.AddCommand(executeCmd)
executeCmd.Flags().BoolVar(&recheckHeadersArg, "recheckHeaders", false, "re-check headers that have already been checked, for each transformer")
}
type Exporter interface {
Export() ([]transformer.EventTransformerInitializer, []transformer.StorageTransformerInitializer, []transformer.ContractTransformerInitializer)
}
func watchEthEvents(w *watcher.EventWatcher, wg *syn.WaitGroup) {
defer wg.Done()
// Execute over the EventTransformerInitializer set using the watcher
log.Info("executing event transformers")
var recheck constants.TransformerExecution
if recheckHeadersArg {
recheck = constants.HeaderRecheck
} else {
recheck = constants.HeaderMissing
}
ticker := time.NewTicker(pollingInterval)
defer ticker.Stop()
for range ticker.C {
w.Execute(recheck)
}
}
func watchEthStorage(w *watcher.StorageWatcher, wg *syn.WaitGroup) {
defer wg.Done()
// Execute over the StorageTransformerInitializer set using the storage watcher
log.Info("executing storage transformers")
ticker := time.NewTicker(pollingInterval)
defer ticker.Stop()
for range ticker.C {
w.Execute()
}
}
func watchEthContract(w *watcher.ContractWatcher, wg *syn.WaitGroup) {
defer wg.Done()
// Execute over the ContractTransformerInitializer set using the contract watcher
log.Info("executing contract_watcher transformers")
ticker := time.NewTicker(pollingInterval)
defer ticker.Stop()
for range ticker.C {
w.Execute()
}
}

View File

@ -1,5 +1,5 @@
// VulcanizeDB
// Copyright © 2018 Vulcanize
// Copyright © 2019 Vulcanize
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
@ -17,10 +17,9 @@
package cmd
import (
"log"
"os"
"time"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/vulcanize/vulcanizedb/pkg/core"
@ -37,12 +36,16 @@ var lightSyncCmd = &cobra.Command{
Short: "Syncs VulcanizeDB with local ethereum node's block headers",
Long: `Syncs VulcanizeDB with local ethereum node. Populates
Postgres with block headers.
./vulcanizedb lightSync --starting-block-number 0 --config public.toml
Expects ethereum node to be running and requires a .toml config:
[database]
name = "vulcanize_public"
hostname = "localhost"
port = 5432
[client]
ipcPath = "/Users/user/Library/Ethereum/geth.ipc"
`,
@ -59,7 +62,9 @@ func init() {
func backFillAllHeaders(blockchain core.BlockChain, headerRepository datastore.HeaderRepository, missingBlocksPopulated chan int, startingBlockNumber int64) {
populated, err := history.PopulateMissingHeaders(blockchain, headerRepository, startingBlockNumber)
if err != nil {
log.Fatal("Error populating headers: ", err)
// TODO Lots of possible errors in the call stack above. If errors occur, we still put
// 0 in the channel, triggering another round
log.Error("backfillAllHeaders: Error populating headers: ", err)
}
missingBlocksPopulated <- populated
}
@ -79,20 +84,29 @@ func lightSync() {
for {
select {
case <-ticker.C:
window := validator.ValidateHeaders()
window.Log(os.Stdout)
case <-missingBlocksPopulated:
window, err := validator.ValidateHeaders()
if err != nil {
log.Error("lightSync: ValidateHeaders failed: ", err)
}
log.Info(window.GetString())
case n := <-missingBlocksPopulated:
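// n == 0 means nothing was backfilled (including the error case noted above),
// so pause briefly to avoid a tight retry loop against an up-to-date node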
if n == 0 {
time.Sleep(3 * time.Second)
}
go backFillAllHeaders(blockChain, headerRepository, missingBlocksPopulated, startingBlockNumber)
}
}
}
func validateArgs(blockChain *geth.BlockChain) {
lastBlock := blockChain.LastBlock().Int64()
if lastBlock == 0 {
lastBlock, err := blockChain.LastBlock()
if err != nil {
log.Error("validateArgs: Error getting last block: ", err)
}
if lastBlock.Int64() == 0 {
log.Fatal("geth initial: state sync not finished")
}
if startingBlockNumber > lastBlock {
if startingBlockNumber > lastBlock.Int64() {
log.Fatal("starting block number > current block number")
}
}

View File

@ -1,125 +0,0 @@
// VulcanizeDB
// Copyright © 2018 Vulcanize
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package cmd
import (
"fmt"
"log"
"time"
"github.com/spf13/cobra"
"github.com/vulcanize/vulcanizedb/libraries/shared"
ft "github.com/vulcanize/vulcanizedb/pkg/omni/full/transformer"
lt "github.com/vulcanize/vulcanizedb/pkg/omni/light/transformer"
st "github.com/vulcanize/vulcanizedb/pkg/omni/shared/transformer"
"github.com/vulcanize/vulcanizedb/utils"
)
// omniWatcherCmd represents the omniWatcher command
var omniWatcherCmd = &cobra.Command{
Use: "omniWatcher",
Short: "Watches events at the provided contract address using fully synced vDB",
Long: `Uses input contract address and event filters to watch events
Expects an ethereum node to be running
Expects an archival node synced into vulcanizeDB
Requires a .toml config file:
[database]
name = "vulcanize_public"
hostname = "localhost"
port = 5432
[client]
ipcPath = "/Users/user/Library/Ethereum/geth.ipc"
`,
Run: func(cmd *cobra.Command, args []string) {
omniWatcher()
},
}
var (
network string
contractAddress string
contractAddresses []string
contractEvents []string
contractMethods []string
eventArgs []string
methodArgs []string
methodPiping bool
mode string
)
func omniWatcher() {
if contractAddress == "" && len(contractAddresses) == 0 {
log.Fatal("Contract address required")
}
ticker := time.NewTicker(5 * time.Second)
defer ticker.Stop()
blockChain := getBlockChain()
db := utils.LoadPostgres(databaseConfig, blockChain.Node())
var t st.Transformer
switch mode {
case "light":
t = lt.NewTransformer(network, blockChain, &db)
case "full":
t = ft.NewTransformer(network, blockChain, &db)
default:
log.Fatal("Invalid mode")
}
contractAddresses = append(contractAddresses, contractAddress)
for _, addr := range contractAddresses {
t.SetEvents(addr, contractEvents)
t.SetMethods(addr, contractMethods)
t.SetEventArgs(addr, eventArgs)
t.SetMethodArgs(addr, methodArgs)
t.SetPiping(addr, methodPiping)
t.SetStartingBlock(addr, startingBlockNumber)
}
err := t.Init()
if err != nil {
log.Fatal(fmt.Sprintf("Failed to initialized transformer\r\nerr: %v\r\n", err))
}
w := shared.Watcher{}
w.AddTransformer(t)
for range ticker.C {
w.Execute()
}
}
func init() {
rootCmd.AddCommand(omniWatcherCmd)
omniWatcherCmd.Flags().StringVarP(&mode, "mode", "o", "light", "'light' or 'full' mode to work with either light synced or fully synced vDB (default is light)")
omniWatcherCmd.Flags().StringVarP(&contractAddress, "contract-address", "a", "", "Single address to generate watchers for")
omniWatcherCmd.Flags().StringArrayVarP(&contractAddresses, "contract-addresses", "l", []string{}, "list of addresses to use; warning: watcher targets the same events and methods for each address")
omniWatcherCmd.Flags().StringArrayVarP(&contractEvents, "events", "e", []string{}, "Subset of events to watch; by default all events are watched")
omniWatcherCmd.Flags().StringArrayVarP(&contractMethods, "methods", "m", nil, "Subset of methods to poll; by default no methods are polled")
omniWatcherCmd.Flags().StringArrayVarP(&eventArgs, "event-args", "f", []string{}, "Argument values to filter event logs for; will only persist event logs that emit at least one of the value specified")
omniWatcherCmd.Flags().StringArrayVarP(&methodArgs, "method-args", "g", []string{}, "Argument values to limit methods to; will only call methods with emitted values that were specified here")
omniWatcherCmd.Flags().StringVarP(&network, "network", "n", "", `Network the contract is deployed on; options: "ropsten", "kovan", and "rinkeby"; default is mainnet"`)
omniWatcherCmd.Flags().Int64VarP(&startingBlockNumber, "starting-block-number", "s", 0, "Block to begin watching; default is the first block in which the contract exists")
omniWatcherCmd.Flags().BoolVarP(&methodPiping, "piping", "p", false, "Turn on method output piping: methods listed first will be polled first and their output used as input to subsequent methods")
}

View File

@ -1,5 +1,5 @@
// VulcanizeDB
// Copyright © 2018 Vulcanize
// Copyright © 2019 Vulcanize
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
@ -18,12 +18,13 @@ package cmd
import (
"fmt"
"log"
"os"
"strings"
"time"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethereum/go-ethereum/rpc"
"github.com/mitchellh/go-homedir"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/spf13/viper"
@ -37,11 +38,19 @@ import (
var (
cfgFile string
databaseConfig config.Database
genConfig config.Plugin
ipc string
levelDbPath string
startingBlockNumber int64
storageDiffsPath string
syncAll bool
endingBlockNumber int64
recheckHeadersArg bool
)
const (
pollingInterval = 7 * time.Second
validationWindow = 15
)
var rootCmd = &cobra.Command{
@ -50,8 +59,9 @@ var rootCmd = &cobra.Command{
}
func Execute() {
log.Info("----- Starting vDB -----")
if err := rootCmd.Execute(); err != nil {
fmt.Println(err)
log.Fatal(err)
}
}
@ -59,6 +69,7 @@ func Execute() {
func database(cmd *cobra.Command, args []string) {
ipc = viper.GetString("client.ipcpath")
levelDbPath = viper.GetString("client.leveldbpath")
storageDiffsPath = viper.GetString("filesystem.storageDiffsPath")
databaseConfig = config.Database{
Name: viper.GetString("database.name"),
Hostname: viper.GetString("database.hostname"),
@ -71,8 +82,11 @@ func database(cmd *cobra.Command, args []string) {
func init() {
cobra.OnInitialize(initConfig)
// When searching for env variables, replace dots in config keys with underscores
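// (e.g. the "database.name" config key can be overridden with the DATABASE_NAME environment variable)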
viper.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
viper.AutomaticEnv()
rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "environment/public.toml", "config file location")
rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file location")
rootCmd.PersistentFlags().String("database-name", "vulcanize_public", "database name")
rootCmd.PersistentFlags().Int("database-port", 5432, "database port")
rootCmd.PersistentFlags().String("database-hostname", "localhost", "database hostname")
@ -80,6 +94,8 @@ func init() {
rootCmd.PersistentFlags().String("database-password", "", "database password")
rootCmd.PersistentFlags().String("client-ipcPath", "", "location of geth.ipc file")
rootCmd.PersistentFlags().String("client-levelDbPath", "", "location of levelDb chaindata")
rootCmd.PersistentFlags().String("filesystem-storageDiffsPath", "", "location of storage diffs csv file")
rootCmd.PersistentFlags().String("exporter-name", "exporter", "name of exporter plugin")
viper.BindPFlag("database.name", rootCmd.PersistentFlags().Lookup("database-name"))
viper.BindPFlag("database.port", rootCmd.PersistentFlags().Lookup("database-port"))
@ -88,26 +104,27 @@ func init() {
viper.BindPFlag("database.password", rootCmd.PersistentFlags().Lookup("database-password"))
viper.BindPFlag("client.ipcPath", rootCmd.PersistentFlags().Lookup("client-ipcPath"))
viper.BindPFlag("client.levelDbPath", rootCmd.PersistentFlags().Lookup("client-levelDbPath"))
viper.BindPFlag("filesystem.storageDiffsPath", rootCmd.PersistentFlags().Lookup("filesystem-storageDiffsPath"))
viper.BindPFlag("exporter.fileName", rootCmd.PersistentFlags().Lookup("exporter-name"))
}
func initConfig() {
if cfgFile != "" {
viper.SetConfigFile(cfgFile)
} else {
home, err := homedir.Dir()
if err != nil {
fmt.Println(err)
os.Exit(1)
}
viper.AddConfigPath(home)
viper.SetConfigName(".vulcanizedb")
noConfigError := "No config file passed with --config flag"
fmt.Println("Error: ", noConfigError)
log.Fatal(noConfigError)
os.Exit(1)
}
viper.AutomaticEnv()
if err := viper.ReadInConfig(); err == nil {
fmt.Printf("Using config file: %s\n\n", viper.ConfigFileUsed())
log.Printf("Using config file: %s\n\n", viper.ConfigFileUsed())
} else {
invalidConfigError := "Couldn't read config file"
fmt.Println("Error: ", invalidConfigError)
log.Fatal(invalidConfigError)
os.Exit(1)
}
}

View File

@ -1,5 +1,5 @@
// VulcanizeDB
// Copyright © 2018 Vulcanize
// Copyright © 2019 Vulcanize
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
@ -17,10 +17,9 @@
package cmd
import (
"log"
"os"
"time"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/vulcanize/vulcanizedb/pkg/core"
@ -54,11 +53,6 @@ Expects ethereum node to be running and requires a .toml config:
},
}
const (
pollingInterval = 7 * time.Second
validationWindow = 15
)
func init() {
rootCmd.AddCommand(syncCmd)
@ -66,7 +60,11 @@ func init() {
}
func backFillAllBlocks(blockchain core.BlockChain, blockRepository datastore.BlockRepository, missingBlocksPopulated chan int, startingBlockNumber int64) {
missingBlocksPopulated <- history.PopulateMissingBlocks(blockchain, blockRepository, startingBlockNumber)
populated, err := history.PopulateMissingBlocks(blockchain, blockRepository, startingBlockNumber)
if err != nil {
log.Error("backfillAllBlocks: error in populateMissingBlocks: ", err)
}
missingBlocksPopulated <- populated
}
func sync() {
@ -74,13 +72,15 @@ func sync() {
defer ticker.Stop()
blockChain := getBlockChain()
lastBlock := blockChain.LastBlock().Int64()
if lastBlock == 0 {
lastBlock, err := blockChain.LastBlock()
if err != nil {
log.Error("sync: Error getting last block: ", err)
}
if lastBlock.Int64() == 0 {
log.Fatal("geth initial: state sync not finished")
}
if startingBlockNumber > lastBlock {
log.Fatal("starting block number > current block number")
if startingBlockNumber > lastBlock.Int64() {
log.Fatal("sync: starting block number > current block number")
}
db := utils.LoadPostgres(databaseConfig, blockChain.Node())
@ -92,8 +92,11 @@ func sync() {
for {
select {
case <-ticker.C:
window := validator.ValidateBlocks()
window.Log(os.Stdout)
window, err := validator.ValidateBlocks()
if err != nil {
log.Error("sync: error in validateBlocks: ", err)
}
log.Info(window.GetString())
case <-missingBlocksPopulated:
go backFillAllBlocks(blockChain, blockRepository, missingBlocksPopulated, startingBlockNumber)
}

View File

@ -0,0 +1,23 @@
-- +goose Up
CREATE TABLE public.blocks (
id SERIAL PRIMARY KEY,
difficulty BIGINT,
extra_data VARCHAR,
gas_limit BIGINT,
gas_used BIGINT,
hash VARCHAR(66),
miner VARCHAR(42),
nonce VARCHAR(20),
"number" BIGINT,
parent_hash VARCHAR(66),
reward NUMERIC,
uncles_reward NUMERIC,
"size" VARCHAR,
"time" BIGINT,
is_final BOOLEAN,
uncle_hash VARCHAR(66)
);
-- +goose Down
DROP TABLE public.blocks;
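These migrations follow goose's "-- +goose Up" / "-- +goose Down" annotation convention. As a rough sketch of applying them programmatically (this assumes the pressly/goose Go API and the lib/pq driver; the DSN is hypothetical and should match the [database] config values):

package main

import (
	"database/sql"
	"log"

	_ "github.com/lib/pq" // postgres driver
	"github.com/pressly/goose"
)

func main() {
	// Hypothetical connection string for a local vulcanize database.
	db, err := sql.Open("postgres", "postgres://vulcanize:vulcanize@localhost:5432/vulcanize_public?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	if err := goose.SetDialect("postgres"); err != nil {
		log.Fatal(err)
	}
	// Applies each -- +goose Up section in file-name order; goose.Down reverses the most recent one.
	if err := goose.Up(db, "db/migrations"); err != nil {
		log.Fatal(err)
	}
}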

View File

@ -0,0 +1,18 @@
-- +goose Up
CREATE TABLE full_sync_transactions (
id SERIAL PRIMARY KEY,
block_id INTEGER NOT NULL REFERENCES blocks(id) ON DELETE CASCADE,
gas_limit NUMERIC,
gas_price NUMERIC,
hash VARCHAR(66),
input_data BYTEA,
nonce NUMERIC,
raw BYTEA,
tx_from VARCHAR(66),
tx_index INTEGER,
tx_to VARCHAR(66),
"value" NUMERIC
);
-- +goose Down
DROP TABLE full_sync_transactions;

View File

@ -0,0 +1,6 @@
-- +goose Up
CREATE INDEX number_index ON blocks (number);
-- +goose Down
DROP INDEX number_index;

View File

@ -0,0 +1,10 @@
-- +goose Up
CREATE TABLE watched_contracts
(
contract_id SERIAL PRIMARY KEY,
contract_abi json,
contract_hash VARCHAR(66) UNIQUE
);
-- +goose Down
DROP TABLE watched_contracts;

View File

@ -0,0 +1,12 @@
-- +goose Up
CREATE TABLE nodes (
id SERIAL PRIMARY KEY,
client_name VARCHAR,
genesis_block VARCHAR(66),
network_id NUMERIC,
node_id VARCHAR(128),
CONSTRAINT node_uc UNIQUE (genesis_block, network_id, node_id)
);
-- +goose Down
DROP TABLE nodes;

View File

@ -1,12 +1,11 @@
BEGIN;
ALTER TABLE blocks
DROP CONSTRAINT node_fk;
-- +goose Up
ALTER TABLE blocks
ADD COLUMN node_id INTEGER NOT NULL,
ADD CONSTRAINT node_fk
FOREIGN KEY (node_id)
REFERENCES nodes (id)
ON DELETE CASCADE;
COMMIT;
-- +goose Down
ALTER TABLE blocks
DROP COLUMN node_id;

View File

@ -1,3 +1,4 @@
-- +goose Up
CREATE TABLE logs (
id SERIAL PRIMARY KEY,
block_number BIGINT,
@ -12,3 +13,7 @@ CREATE TABLE logs (
CONSTRAINT log_uc UNIQUE (block_number, index)
);
-- +goose Down
DROP TABLE logs;

View File

@ -0,0 +1,7 @@
-- +goose Up
ALTER TABLE blocks
ADD CONSTRAINT node_id_block_number_uc UNIQUE (number, node_id);
-- +goose Down
ALTER TABLE blocks
DROP CONSTRAINT node_id_block_number_uc;

View File

@ -0,0 +1,5 @@
-- +goose Up
CREATE INDEX block_id_index ON full_sync_transactions (block_id);
-- +goose Down
DROP INDEX block_id_index;

View File

@ -0,0 +1,5 @@
-- +goose Up
CREATE INDEX node_id_index ON blocks (node_id);
-- +goose Down
DROP INDEX node_id_index;

View File

@ -0,0 +1,5 @@
-- +goose Up
CREATE INDEX tx_to_index ON full_sync_transactions(tx_to);
-- +goose Down
DROP INDEX tx_to_index;

View File

@ -0,0 +1,5 @@
-- +goose Up
CREATE INDEX tx_from_index ON full_sync_transactions(tx_from);
-- +goose Down
DROP INDEX tx_from_index;

View File

@ -0,0 +1,16 @@
-- +goose Up
CREATE TABLE full_sync_receipts
(
id SERIAL PRIMARY KEY,
transaction_id INTEGER NOT NULL REFERENCES full_sync_transactions (id) ON DELETE CASCADE,
contract_address VARCHAR(42),
cumulative_gas_used NUMERIC,
gas_used NUMERIC,
state_root VARCHAR(66),
status INTEGER,
tx_hash VARCHAR(66)
);
-- +goose Down
DROP TABLE full_sync_receipts;

View File

@ -0,0 +1,5 @@
-- +goose Up
CREATE INDEX transaction_id_index ON full_sync_receipts (transaction_id);
-- +goose Down
DROP INDEX transaction_id_index;

View File

@ -0,0 +1,23 @@
-- +goose Up
ALTER TABLE logs
DROP CONSTRAINT log_uc;
ALTER TABLE logs
ADD COLUMN receipt_id INT;
ALTER TABLE logs
ADD CONSTRAINT receipts_fk
FOREIGN KEY (receipt_id)
REFERENCES full_sync_receipts (id)
ON DELETE CASCADE;
-- +goose Down
ALTER TABLE logs
DROP CONSTRAINT receipts_fk;
ALTER TABLE logs
DROP COLUMN receipt_id;
ALTER TABLE logs
ADD CONSTRAINT log_uc UNIQUE (block_number, index);

View File

@ -1,3 +1,4 @@
-- +goose Up
CREATE TABLE log_filters (
id SERIAL,
name VARCHAR NOT NULL CHECK (name <> ''),
@ -9,4 +10,7 @@ CREATE TABLE log_filters (
topic2 VARCHAR(66),
topic3 VARCHAR(66),
CONSTRAINT name_uc UNIQUE (name)
);
);
-- +goose Down
DROP TABLE log_filters;

View File

@ -1,3 +1,4 @@
-- +goose Up
CREATE VIEW block_stats AS
SELECT
max(block_number) AS max_block,
@ -26,4 +27,8 @@ CREATE VIEW watched_event_logs AS
WHERE (log_filters.topic0 = logs.topic0 OR log_filters.topic0 ISNULL)
AND (log_filters.topic1 = logs.topic1 OR log_filters.topic1 ISNULL)
AND (log_filters.topic2 = logs.topic2 OR log_filters.topic2 ISNULL)
AND (log_filters.topic3 = logs.topic3 OR log_filters.topic3 ISNULL);
AND (log_filters.topic3 = logs.topic3 OR log_filters.topic3 ISNULL);
-- +goose Down
DROP VIEW watched_event_logs;
DROP VIEW block_stats;

View File

@ -0,0 +1,14 @@
-- +goose Up
ALTER TABLE log_filters
DROP CONSTRAINT log_filters_from_block_check1;
ALTER TABLE log_filters
ADD CONSTRAINT log_filters_to_block_check CHECK (to_block >= 0);
-- +goose Down
ALTER TABLE log_filters
DROP CONSTRAINT log_filters_to_block_check;
ALTER TABLE log_filters
ADD CONSTRAINT log_filters_from_block_check1 CHECK (to_block >= 0);

View File

@ -0,0 +1,43 @@
-- +goose Up
ALTER TABLE public.nodes RENAME TO eth_nodes;
ALTER TABLE public.eth_nodes RENAME COLUMN node_id TO eth_node_id;
ALTER TABLE public.eth_nodes DROP CONSTRAINT node_uc;
ALTER TABLE public.eth_nodes
ADD CONSTRAINT eth_node_uc UNIQUE (genesis_block, network_id, eth_node_id);
ALTER TABLE public.blocks RENAME COLUMN node_id TO eth_node_id;
ALTER TABLE public.blocks DROP CONSTRAINT node_id_block_number_uc;
ALTER TABLE public.blocks
ADD CONSTRAINT eth_node_id_block_number_uc UNIQUE (number, eth_node_id);
ALTER TABLE public.blocks DROP CONSTRAINT node_fk;
ALTER TABLE public.blocks
ADD CONSTRAINT node_fk
FOREIGN KEY (eth_node_id) REFERENCES eth_nodes (id) ON DELETE CASCADE;
-- +goose Down
ALTER TABLE public.eth_nodes
RENAME TO nodes;
ALTER TABLE public.nodes
RENAME COLUMN eth_node_id TO node_id;
ALTER TABLE public.nodes
DROP CONSTRAINT eth_node_uc;
ALTER TABLE public.nodes
ADD CONSTRAINT node_uc UNIQUE (genesis_block, network_id, node_id);
ALTER TABLE public.blocks RENAME COLUMN eth_node_id TO node_id;
ALTER TABLE public.blocks DROP CONSTRAINT eth_node_id_block_number_uc;
ALTER TABLE public.blocks
ADD CONSTRAINT node_id_block_number_uc UNIQUE (number, node_id);
ALTER TABLE public.blocks DROP CONSTRAINT node_fk;
ALTER TABLE public.blocks
ADD CONSTRAINT node_fk
FOREIGN KEY (node_id) REFERENCES nodes (id) ON DELETE CASCADE;

View File

@ -0,0 +1,44 @@
-- +goose Up
ALTER TABLE full_sync_receipts
ADD COLUMN block_id INT;
UPDATE full_sync_receipts
SET block_id = (
SELECT block_id FROM full_sync_transactions WHERE full_sync_transactions.id = full_sync_receipts.transaction_id
);
ALTER TABLE full_sync_receipts
ALTER COLUMN block_id SET NOT NULL;
ALTER TABLE full_sync_receipts
ADD CONSTRAINT blocks_fk
FOREIGN KEY (block_id)
REFERENCES blocks (id)
ON DELETE CASCADE;
ALTER TABLE full_sync_receipts
DROP COLUMN transaction_id;
-- +goose Down
ALTER TABLE full_sync_receipts
ADD COLUMN transaction_id INT;
CREATE INDEX transaction_id_index ON full_sync_receipts (transaction_id);
UPDATE full_sync_receipts
SET transaction_id = (
SELECT id FROM full_sync_transactions WHERE full_sync_transactions.hash = full_sync_receipts.tx_hash
);
ALTER TABLE full_sync_receipts
ALTER COLUMN transaction_id SET NOT NULL;
ALTER TABLE full_sync_receipts
ADD CONSTRAINT transaction_fk
FOREIGN KEY (transaction_id)
REFERENCES full_sync_transactions (id)
ON DELETE CASCADE;
ALTER TABLE full_sync_receipts
DROP COLUMN block_id;

View File

@ -1,5 +1,4 @@
BEGIN;
-- +goose Up
ALTER TABLE blocks
ADD COLUMN eth_node_fingerprint VARCHAR(128);
@ -11,4 +10,7 @@ UPDATE blocks
ALTER TABLE blocks
ALTER COLUMN eth_node_fingerprint SET NOT NULL;
COMMIT;
-- +goose Down
ALTER TABLE blocks
DROP COLUMN eth_node_fingerprint;

View File

@ -0,0 +1,17 @@
-- +goose Up
CREATE TABLE public.headers (
id SERIAL PRIMARY KEY,
hash VARCHAR(66),
block_number BIGINT,
raw JSONB,
block_timestamp NUMERIC,
eth_node_id INTEGER NOT NULL REFERENCES eth_nodes (id) ON DELETE CASCADE,
eth_node_fingerprint VARCHAR(128)
);
-- Index is automatically removed when the table is dropped
CREATE INDEX headers_block_number ON public.headers (block_number);
-- +goose Down
DROP TABLE public.headers;

View File

@ -1,4 +1,8 @@
-- +goose Up
CREATE TABLE public.checked_headers (
id SERIAL PRIMARY KEY,
header_id INTEGER UNIQUE NOT NULL REFERENCES headers (id) ON DELETE CASCADE
);
);
-- +goose Down
DROP TABLE public.checked_headers;

View File

@ -0,0 +1,12 @@
-- +goose Up
CREATE TABLE public.queued_storage (
id SERIAL PRIMARY KEY,
block_height BIGINT,
block_hash BYTEA,
contract BYTEA,
storage_key BYTEA,
storage_value BYTEA
);
-- +goose Down
DROP TABLE public.queued_storage;

View File

@ -0,0 +1,19 @@
-- +goose Up
CREATE TABLE light_sync_transactions (
id SERIAL PRIMARY KEY,
header_id INTEGER NOT NULL REFERENCES headers(id) ON DELETE CASCADE,
hash VARCHAR(66),
gas_limit NUMERIC,
gas_price NUMERIC,
input_data BYTEA,
nonce NUMERIC,
raw BYTEA,
tx_from VARCHAR(44),
tx_index INTEGER,
tx_to VARCHAR(44),
"value" NUMERIC,
UNIQUE (header_id, hash)
);
-- +goose Down
DROP TABLE light_sync_transactions;

View File

@ -0,0 +1,18 @@
-- +goose Up
CREATE TABLE light_sync_receipts(
id SERIAL PRIMARY KEY,
transaction_id INTEGER NOT NULL REFERENCES light_sync_transactions(id) ON DELETE CASCADE,
header_id INTEGER NOT NULL REFERENCES headers(id) ON DELETE CASCADE,
contract_address VARCHAR(42),
cumulative_gas_used NUMERIC,
gas_used NUMERIC,
state_root VARCHAR(66),
status INTEGER,
tx_hash VARCHAR(66),
rlp BYTEA,
UNIQUE(header_id, transaction_id)
);
-- +goose Down
DROP TABLE light_sync_receipts;

View File

@ -0,0 +1,16 @@
-- +goose Up
CREATE TABLE public.uncles (
id SERIAL PRIMARY KEY,
hash VARCHAR(66) NOT NULL,
block_id INTEGER NOT NULL REFERENCES blocks (id) ON DELETE CASCADE,
reward NUMERIC NOT NULL,
miner VARCHAR(42) NOT NULL,
raw JSONB,
block_timestamp NUMERIC,
eth_node_id INTEGER NOT NULL REFERENCES eth_nodes (id) ON DELETE CASCADE,
eth_node_fingerprint VARCHAR(128),
UNIQUE (block_id, hash)
);
-- +goose Down
DROP TABLE public.uncles;

View File

@ -1 +0,0 @@
DROP TABLE public.blocks

View File

@ -1,4 +0,0 @@
CREATE TABLE public.blocks
(
block_number BIGINT
)

View File

@ -1,5 +0,0 @@
ALTER TABLE blocks
DROP COLUMN block_gaslimit,
DROP COLUMN block_gasused,
DROP COLUMN block_time;

View File

@ -1,4 +0,0 @@
ALTER TABLE blocks
ADD COLUMN block_gaslimit DOUBLE PRECISION,
ADD COLUMN block_gasused DOUBLE PRECISION,
ADD COLUMN block_time DOUBLE PRECISION;

View File

@ -1 +0,0 @@
DROP TABLE transactions

View File

@ -1,10 +0,0 @@
CREATE TABLE transactions
(
id SERIAL PRIMARY KEY,
tx_hash VARCHAR(66),
tx_nonce NUMERIC,
tx_to varchar(66),
tx_gaslimit NUMERIC,
tx_gasprice NUMERIC,
tx_value NUMERIC
)

View File

@ -1 +0,0 @@
ALTER TABLE blocks DROP id

View File

@ -1 +0,0 @@
ALTER TABLE blocks ADD COLUMN id SERIAL PRIMARY KEY

View File

@ -1,2 +0,0 @@
ALTER TABLE transactions
DROP COLUMN block_id

View File

@ -1,5 +0,0 @@
ALTER TABLE transactions
ADD COLUMN block_id INTEGER NOT NULL,
ADD CONSTRAINT fk_test
FOREIGN KEY (block_id)
REFERENCES blocks (id)

View File

@ -1 +0,0 @@
DROP INDEX block_number_index;

View File

@ -1 +0,0 @@
CREATE INDEX block_number_index ON blocks (block_number);

View File

@ -1,7 +0,0 @@
ALTER TABLE blocks
Drop COLUMN block_difficulty,
Drop COLUMN block_hash,
drop COLUMN block_nonce,
drop COLUMN block_parenthash,
drop COLUMN block_size,
drop COLUMN uncle_hash

View File

@ -1,7 +0,0 @@
ALTER TABLE blocks
ADD COLUMN block_difficulty BIGINT,
ADD COLUMN block_hash VARCHAR(66),
ADD COLUMN block_nonce VARCHAR(20),
ADD COLUMN block_parenthash VARCHAR(66),
ADD COLUMN block_size BIGINT,
ADD COLUMN uncle_hash VARCHAR(66)

View File

@ -1 +0,0 @@
DROP TABLE watched_contracts

View File

@ -1,5 +0,0 @@
CREATE TABLE watched_contracts
(
contract_id SERIAL PRIMARY KEY,
contract_hash VARCHAR(66)
)

View File

@ -1,2 +0,0 @@
ALTER TABLE transactions
DROP COLUMN tx_from

View File

@ -1,2 +0,0 @@
ALTER TABLE transactions
ADD COLUMN tx_from VARCHAR(66)

View File

@ -1,2 +0,0 @@
ALTER TABLE watched_contracts
DROP COLUMN contract_abi;

View File

@ -1,2 +0,0 @@
ALTER TABLE watched_contracts
ADD COLUMN contract_abi json;

View File

@ -1 +0,0 @@
DROP TABLE nodes;

View File

@ -1,6 +0,0 @@
CREATE TABLE nodes (
id SERIAL PRIMARY KEY,
genesis_block VARCHAR(66),
network_id NUMERIC,
CONSTRAINT node_uc UNIQUE (genesis_block, network_id)
);

View File

@ -1,2 +0,0 @@
ALTER TABLE blocks
DROP COLUMN node_id;

View File

@ -1,5 +0,0 @@
ALTER TABLE blocks
ADD COLUMN node_id INTEGER NOT NULL,
ADD CONSTRAINT node_fk
FOREIGN KEY (node_id)
REFERENCES nodes (id);

View File

@ -1,2 +0,0 @@
ALTER TABLE watched_contracts
DROP CONSTRAINT contract_hash_uc;

View File

@ -1,2 +0,0 @@
ALTER TABLE watched_contracts
ADD CONSTRAINT contract_hash_uc UNIQUE (contract_hash);

View File

@ -1 +0,0 @@
DROP TABLE logs;

View File

@ -1,11 +0,0 @@
BEGIN;
ALTER TABLE transactions
DROP CONSTRAINT blocks_fk;
ALTER TABLE transactions
ADD CONSTRAINT fk_test
FOREIGN KEY (block_id)
REFERENCES blocks (id);
COMMIT;

View File

@ -1,12 +0,0 @@
BEGIN;
ALTER TABLE transactions
DROP CONSTRAINT fk_test;
ALTER TABLE transactions
ADD CONSTRAINT blocks_fk
FOREIGN KEY (block_id)
REFERENCES blocks (id)
ON DELETE CASCADE;
COMMIT;

View File

@ -1,11 +0,0 @@
BEGIN;
ALTER TABLE blocks
DROP CONSTRAINT node_fk;
ALTER TABLE blocks
ADD CONSTRAINT node_fk
FOREIGN KEY (node_id)
REFERENCES nodes (id);
COMMIT;

View File

@ -1,2 +0,0 @@
ALTER TABLE blocks
DROP COLUMN is_final;

View File

@ -1,2 +0,0 @@
ALTER TABLE blocks
ADD COLUMN is_final BOOLEAN;

View File

@ -1,2 +0,0 @@
ALTER TABLE blocks
DROP CONSTRAINT node_id_block_number_uc;

View File

@ -1,2 +0,0 @@
ALTER TABLE blocks
ADD CONSTRAINT node_id_block_number_uc UNIQUE (block_number, node_id);

View File

@ -1 +0,0 @@
DROP INDEX block_id_index;

View File

@ -1 +0,0 @@
CREATE INDEX block_id_index ON transactions (block_id);

View File

@ -1 +0,0 @@
DROP INDEX node_id_index;

View File

@ -1 +0,0 @@
CREATE INDEX node_id_index ON blocks (node_id);

View File

@ -1 +0,0 @@
DROP INDEX tx_to_index;

View File

@ -1 +0,0 @@
CREATE INDEX tx_to_index ON transactions(tx_to);

View File

@ -1 +0,0 @@
DROP INDEX tx_from_index;

View File

@ -1 +0,0 @@
CREATE INDEX tx_from_index ON transactions(tx_from);

View File

@ -1,2 +0,0 @@
ALTER TABLE blocks
DROP COLUMN block_miner;

View File

@ -1,2 +0,0 @@
ALTER TABLE blocks
ADD COLUMN block_miner VARCHAR(42);

View File

@ -1,2 +0,0 @@
ALTER TABLE blocks
DROP COLUMN block_extra_data;

View File

@ -1,2 +0,0 @@
ALTER TABLE blocks
ADD COLUMN block_extra_data VARCHAR;

View File

@ -1,3 +0,0 @@
ALTER TABLE blocks
DROP COLUMN block_reward,
DROP COLUMN block_uncles_reward;

View File

@ -1,3 +0,0 @@
ALTER TABLE blocks
ADD COLUMN block_reward NUMERIC,
ADD COLUMN block_uncles_reward NUMERIC;

View File

@ -1,2 +0,0 @@
ALTER TABLE transactions
DROP COLUMN tx_input_data;

View File

@ -1,2 +0,0 @@
ALTER TABLE transactions
ADD COLUMN tx_input_data VARCHAR;

View File

@ -1,2 +0,0 @@
DROP TABLE receipts;

View File

@ -1,16 +0,0 @@
CREATE TABLE receipts
(
id SERIAL PRIMARY KEY,
transaction_id INTEGER NOT NULL,
contract_address VARCHAR(42),
cumulative_gas_used NUMERIC,
gas_used NUMERIC,
state_root VARCHAR(66),
status INTEGER,
tx_hash VARCHAR(66),
CONSTRAINT transaction_fk FOREIGN KEY (transaction_id)
REFERENCES transactions (id)
ON DELETE CASCADE
);

View File

@ -1 +0,0 @@
DROP INDEX transaction_id_index;

View File

@ -1 +0,0 @@
CREATE INDEX transaction_id_index ON receipts (transaction_id);

Some files were not shown because too many files have changed in this diff.