remove maker migrations and convert back to timestamps and fix bug in

composeAndExecute command
This commit is contained in:
Ian Norden 2019-02-13 15:00:09 -06:00
parent b449193b16
commit 3d34a9e7c9
25 changed files with 137 additions and 5108 deletions

1
.gitignore vendored
View File

@ -12,3 +12,4 @@ postgraphile/build/
postgraphile/node_modules/ postgraphile/node_modules/
postgraphile/package-lock.json postgraphile/package-lock.json
vulcanizedb.log vulcanizedb.log
db/migrations/00*.sql

View File

@ -54,23 +54,35 @@ var composeAndExecuteCmd = &cobra.Command{
ipcPath = "http://kovan0.vulcanize.io:8545" ipcPath = "http://kovan0.vulcanize.io:8545"
[exporter] [exporter]
name = "exporter" name = "exampleTransformerExporter"
[exporter.transformers] save = false
transformer1 = "path/to/transformer1" transformerNames = [
transformer2 = "path/to/transformer2" "transformer1",
transformer3 = "path/to/transformer3" "transformer2",
transformer4 = "path/to/transformer4" "transformer3",
[exporter.types] "transformer4",
transformer1 = "eth_event" ]
transformer2 = "eth_event" [exporter.transformer1]
transformer3 = "eth_event" path = "path/to/transformer1"
transformer4 = "eth_storage" type = "eth_event"
[exporter.repositories] repository = "github.com/account/repo"
transformers = "github.com/account/repo" migrations = "db/migrations"
transformer4 = "github.com/account2/repo2" [exporter.transformer2]
[exporter.migrations] path = "path/to/transformer2"
transformers = "db/migrations" type = "eth_event"
transformer4 = "to/db/migrations" repository = "github.com/account/repo"
migrations = "db/migrations"
[exporter.transformer3]
path = "path/to/transformer3"
type = "eth_storage"
repository = "github.com/account/repo"
migrations = "db/migrations"
[exporter.transformer4]
path = "path/to/transformer4"
type = "eth_event"
repository = "github.com/account2/repo2"
migrations = "to/db/migrations"
Note: If any of the imported transformers need additional Note: If any of the imported transformers need additional
config variables do not forget to include those as well config variables do not forget to include those as well
@ -81,7 +93,7 @@ This plugin is loaded and the set of transformer initializers is exported
from it and loaded into and executed over by the appropriate watcher. from it and loaded into and executed over by the appropriate watcher.
The type of watcher that the transformer works with is specified using the The type of watcher that the transformer works with is specified using the
exporter.types config variable as shown above. Currently there are watchers type variable for each transformer in the config. Currently there are watchers
of event data from an eth node (eth_event) and storage data from an eth node of event data from an eth node (eth_event) and storage data from an eth node
(eth_storage). Soon there will be watchers for ipfs (ipfs_event and ipfs_storage). (eth_storage). Soon there will be watchers for ipfs (ipfs_event and ipfs_storage).
@ -217,34 +229,32 @@ func prepConfig() {
transformers := make(map[string]config.Transformer) transformers := make(map[string]config.Transformer)
for _, name := range names { for _, name := range names {
transformer := viper.GetStringMapString("exporter." + name) transformer := viper.GetStringMapString("exporter." + name)
_, ok := transformer["path"] p, ok := transformer["path"]
if !ok { if !ok || p == "" {
log.Fatal(fmt.Sprintf("%s transformer config is missing `path` value", name)) log.Fatal(fmt.Sprintf("%s transformer config is missing `path` value", name))
} }
_, ok = transformer["repository"] r, ok := transformer["repository"]
if !ok { if !ok || r == "" {
log.Fatal(fmt.Sprintf("%s transformer config is missing `repository` value", name)) log.Fatal(fmt.Sprintf("%s transformer config is missing `repository` value", name))
} }
_, ok = transformer["migrations"] m, ok := transformer["migrations"]
if !ok { if !ok || m == "" {
log.Fatal(fmt.Sprintf("%s transformer config is missing `migrations` value", name)) log.Fatal(fmt.Sprintf("%s transformer config is missing `migrations` value", name))
} }
ty, ok := transformer["type"] t, ok := transformer["type"]
if !ok { if !ok {
log.Fatal(fmt.Sprintf("%s transformer config is missing `type` value", name)) log.Fatal(fmt.Sprintf("%s transformer config is missing `type` value", name))
} }
transformerType := config.GetTransformerType(t)
transformerType := config.GetTransformerType(ty)
if transformerType == config.UnknownTransformerType { if transformerType == config.UnknownTransformerType {
log.Fatal(errors.New(`unknown transformer type in exporter config log.Fatal(errors.New(`unknown transformer type in exporter config accepted types are "eth_event", "eth_storage"`))
accepted types are "eth_event", "eth_storage"`))
} }
transformers[name] = config.Transformer{ transformers[name] = config.Transformer{
Path: transformer["path"], Path: p,
Type: transformerType, Type: transformerType,
RepositoryPath: transformer["repository"], RepositoryPath: r,
MigrationPath: transformer["migrations"], MigrationPath: m,
} }
} }

View File

@ -1,71 +0,0 @@
// VulcanizeDB
// Copyright © 2018 Vulcanize
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package cmd
import (
"github.com/spf13/cobra"
"github.com/vulcanize/vulcanizedb/libraries/shared"
"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/fs"
"github.com/vulcanize/vulcanizedb/pkg/transformers"
"github.com/vulcanize/vulcanizedb/pkg/transformers/shared/storage"
"log"
)
// parseStorageDiffsCmd represents the parseStorageDiffs command
var parseStorageDiffsCmd = &cobra.Command{
Use: "parseStorageDiffs",
Short: "Continuously ingest storage diffs from a CSV file",
Long: `Read storage diffs out of a CSV file that is constantly receiving
new rows from an Ethereum node. For example:
./vulcanizedb parseStorageDiffs --config=environments/staging.toml
Note that the path to your storage diffs must be configured in your toml
file under storageDiffsPath.`,
Run: func(cmd *cobra.Command, args []string) {
parseStorageDiffs()
},
}
func init() {
rootCmd.AddCommand(parseStorageDiffsCmd)
}
func parseStorageDiffs() {
blockChain := getBlockChain()
db, err := postgres.NewDB(databaseConfig, blockChain.Node())
if err != nil {
log.Fatal("Failed to initialize database: ", err)
}
tailer := fs.FileTailer{Path: storageDiffsPath}
// TODO: configure transformers
watcher := shared.NewStorageWatcher(tailer, db)
watcher.AddTransformers([]storage.TransformerInitializer{
transformers.GetCatStorageTransformer().NewTransformer,
transformers.GetPitStorageTransformer().NewTransformer,
transformers.GetVatStorageTransformer().NewTransformer,
transformers.GetVowStorageTransformer().NewTransformer,
})
err = watcher.Execute()
if err != nil {
log.Fatal(err)
}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,5 +1,5 @@
[database] [database]
name = "vulcanize_public" name = "vulcanize_infura"
hostname = "localhost" hostname = "localhost"
user = "vulcanize" user = "vulcanize"
password = "vulcanize" password = "vulcanize"
@ -28,7 +28,6 @@
"flap_kick", "flap_kick",
"flip_kick", "flip_kick",
"flop_kick", "flop_kick",
"frob_kick",
"frob", "frob",
"pit_file_debt_ceiling", "pit_file_debt_ceiling",
"pit_file_ilk", "pit_file_ilk",
@ -85,7 +84,7 @@
type = "eth_event" type = "eth_event"
repository = "github.com/vulcanize/mcd_transformers" repository = "github.com/vulcanize/mcd_transformers"
migrations = "db/migrations" migrations = "db/migrations"
[exporter.drip_file_repo] [exporter.drip_file_repo]
path = "transformers/drip_file/repo/initializer" path = "transformers/drip_file/repo/initializer"
type = "eth_event" type = "eth_event"
repository = "github.com/vulcanize/mcd_transformers" repository = "github.com/vulcanize/mcd_transformers"

View File

@ -0,0 +1,45 @@
[database]
name = "vulcanize_public"
hostname = "localhost"
user = "vulcanize"
password = "vulcanize"
port = 5432
[client]
ipcPath = "http://kovan0.vulcanize.io:8545"
[datadog]
name = "maker_vdb_staging"
[exporter]
name = "storageTransformerExporter"
save = false
transformerNames = [
"pit",
"vat",
"vow"
]
[exporter.pit]
path = "transformers/storage_diffs/maker/pit/initializer"
type = "eth_storage"
repository = "github.com/vulcanize/mcd_transformers"
migrations = "db/migrations"
[exporter.vat]
path = "transformers/storage_diffs/maker/vat/initializer"
type = "eth_storage"
repository = "github.com/vulcanize/mcd_transformers"
migrations = "db/migrations"
[exporter.vow]
path = "transformers/storage_diffs/maker/vow/initializer"
type = "eth_storage"
repository = "github.com/vulcanize/mcd_transformers"
migrations = "db/migrations"
[filesystem]
storageDiffsPath = "INSERT-PATH-TO-STORAGE-DIFFS"
[contract]
[contract.address]
pit = "0xe7cf3198787c9a4daac73371a38f29aaeeced87e"
vat = "0xcd726790550afcd77e9a7a47e86a3f9010af126b"
vow = "0x3728e9777B2a0a611ee0F89e00E01044ce4736d1"

View File

@ -22,8 +22,8 @@ import (
"github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/crypto" "github.com/ethereum/go-ethereum/crypto"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
) )
type Mappings interface { type Mappings interface {
@ -32,14 +32,18 @@ type Mappings interface {
} }
const ( const (
IndexZero = "0000000000000000000000000000000000000000000000000000000000000000" IndexZero = "0000000000000000000000000000000000000000000000000000000000000000"
IndexOne = "0000000000000000000000000000000000000000000000000000000000000001" IndexOne = "0000000000000000000000000000000000000000000000000000000000000001"
IndexTwo = "0000000000000000000000000000000000000000000000000000000000000002" IndexTwo = "0000000000000000000000000000000000000000000000000000000000000002"
IndexThree = "0000000000000000000000000000000000000000000000000000000000000003" IndexThree = "0000000000000000000000000000000000000000000000000000000000000003"
IndexFour = "0000000000000000000000000000000000000000000000000000000000000004" IndexFour = "0000000000000000000000000000000000000000000000000000000000000004"
IndexFive = "0000000000000000000000000000000000000000000000000000000000000005" IndexFive = "0000000000000000000000000000000000000000000000000000000000000005"
IndexSix = "0000000000000000000000000000000000000000000000000000000000000006" IndexSix = "0000000000000000000000000000000000000000000000000000000000000006"
IndexSeven = "0000000000000000000000000000000000000000000000000000000000000007" IndexSeven = "0000000000000000000000000000000000000000000000000000000000000007"
IndexEight = "0000000000000000000000000000000000000000000000000000000000000008"
IndexNine = "0000000000000000000000000000000000000000000000000000000000000009"
IndexTen = "0000000000000000000000000000000000000000000000000000000000000010"
IndexEleven = "0000000000000000000000000000000000000000000000000000000000000011"
) )
func GetMapping(indexOnContract, key string) common.Hash { func GetMapping(indexOnContract, key string) common.Hash {

View File

@ -87,6 +87,7 @@ const (
func (pt TransformerType) String() string { func (pt TransformerType) String() string {
names := [...]string{ names := [...]string{
"Unknown",
"eth_event", "eth_event",
"eth_storage", "eth_storage",
} }
@ -105,7 +106,7 @@ func GetTransformerType(str string) TransformerType {
} }
for _, ty := range types { for _, ty := range types {
if ty.String() == str && ty.String() != "Unknown" { if ty.String() == str {
return ty return ty
} }
} }

View File

@ -78,18 +78,6 @@ var storageConfig = config.Plugin{
var combinedConfig = config.Plugin{ var combinedConfig = config.Plugin{
Transformers: map[string]config.Transformer{ Transformers: map[string]config.Transformer{
"pit": {
Path: "transformers/storage_diffs/maker/pit/initializer",
Type: config.EthStorage,
MigrationPath: "db/migrations",
RepositoryPath: "github.com/vulcanize/mcd_transformers",
},
"vat": {
Path: "transformers/storage_diffs/maker/vat/initializer",
Type: config.EthStorage,
MigrationPath: "db/migrations",
RepositoryPath: "github.com/vulcanize/mcd_transformers",
},
"bite": { "bite": {
Path: "transformers/bite/initializer", Path: "transformers/bite/initializer",
Type: config.EthEvent, Type: config.EthEvent,
@ -102,6 +90,18 @@ var combinedConfig = config.Plugin{
MigrationPath: "db/migrations", MigrationPath: "db/migrations",
RepositoryPath: "github.com/vulcanize/mcd_transformers", RepositoryPath: "github.com/vulcanize/mcd_transformers",
}, },
"pit": {
Path: "transformers/storage_diffs/maker/pit/initializer",
Type: config.EthStorage,
MigrationPath: "db/migrations",
RepositoryPath: "github.com/vulcanize/mcd_transformers",
},
"vat": {
Path: "transformers/storage_diffs/maker/vat/initializer",
Type: config.EthStorage,
MigrationPath: "db/migrations",
RepositoryPath: "github.com/vulcanize/mcd_transformers",
},
}, },
FileName: "testComboTransformerSet", FileName: "testComboTransformerSet",
FilePath: "$GOPATH/src/github.com/vulcanize/vulcanizedb/pkg/plugin/test_helpers/test", FilePath: "$GOPATH/src/github.com/vulcanize/vulcanizedb/pkg/plugin/test_helpers/test",
@ -128,6 +128,9 @@ var _ = Describe("Generator test", func() {
var headerID int64 var headerID int64
viper.SetConfigName("compose") viper.SetConfigName("compose")
viper.AddConfigPath("$GOPATH/src/github.com/vulcanize/vulcanizedb/environments/") viper.AddConfigPath("$GOPATH/src/github.com/vulcanize/vulcanizedb/environments/")
AfterSuite(func() {
test_helpers.DropTestSchema(db)
})
Describe("Event Transformers only", func() { Describe("Event Transformers only", func() {
BeforeEach(func() { BeforeEach(func() {
@ -226,6 +229,7 @@ var _ = Describe("Generator test", func() {
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
}) })
Describe("GenerateTransformerPlugin", func() { Describe("GenerateTransformerPlugin", func() {
It("It bundles the specified StorageTransformerInitializers into a Exporter object and creates .so", func() { It("It bundles the specified StorageTransformerInitializers into a Exporter object and creates .so", func() {
plug, err := plugin.Open(soPath) plug, err := plugin.Open(soPath)
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
@ -275,6 +279,7 @@ var _ = Describe("Generator test", func() {
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
}) })
Describe("GenerateTransformerPlugin", func() { Describe("GenerateTransformerPlugin", func() {
It("It bundles the specified TransformerInitializers and StorageTransformerInitializers into a Exporter object and creates .so", func() { It("It bundles the specified TransformerInitializers and StorageTransformerInitializers into a Exporter object and creates .so", func() {
plug, err := plugin.Open(soPath) plug, err := plugin.Open(soPath)
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())

View File

@ -66,9 +66,16 @@ func (m *manager) RunMigrations() error {
if err != nil { if err != nil {
return err return err
} }
// Fix the migrations
cmd := exec.Command("goose", "fix")
cmd.Dir = m.tmpMigDir
err = cmd.Run()
if err != nil {
return errors.New(fmt.Sprintf("version fixing for plugin migrations failed: %s", err.Error()))
}
// Run the copied migrations with goose // Run the copied migrations with goose
pgStr := fmt.Sprintf("postgres://%s:%d/%s?sslmode=disable", m.DBConfig.Hostname, m.DBConfig.Port, m.DBConfig.Name) pgStr := fmt.Sprintf("postgres://%s:%d/%s?sslmode=disable", m.DBConfig.Hostname, m.DBConfig.Port, m.DBConfig.Name)
cmd := exec.Command("goose", "postgres", pgStr, "up") cmd = exec.Command("goose", "postgres", pgStr, "up")
cmd.Dir = m.tmpMigDir cmd.Dir = m.tmpMigDir
err = cmd.Run() err = cmd.Run()
if err != nil { if err != nil {

View File

@ -73,9 +73,11 @@ func TearDown(db *postgres.DB) {
_, err = tx.Exec(`DELETE FROM checked_headers`) _, err = tx.Exec(`DELETE FROM checked_headers`)
Expect(err).NotTo(HaveOccurred()) Expect(err).NotTo(HaveOccurred())
_, err = tx.Exec(`DELETE FROM maker.bite`)
Expect(err).NotTo(HaveOccurred())
err = tx.Commit() err = tx.Commit()
Expect(err).NotTo(HaveOccurred()) Expect(err).NotTo(HaveOccurred())
} }
func DropTestSchema(db *postgres.DB) {
_, err := db.Exec(`DROP SCHEMA IF EXISTS maker CASCADE`)
Expect(err).NotTo(HaveOccurred())
}

View File

@ -1,167 +0,0 @@
# Storage Transformer Example
In the Storage Transformer README, we went over code that needs to be written to add a new storage transformer to VulcanizeDB.
In this document, we'll go over an example contract and discuss how one would go about watching its storage.
## Example Contract
For the purposes of this document, we'll be assuming that we're interested in watching the following contract:
```solidity
pragma solidity ^0.5.1;
contract Contract {
uint256 public num_addresses;
mapping(address => uint) public addresses;
event AddressAdded(
address addr,
uint256 num_addrs
);
constructor() public {
addresses[msg.sender] = 1;
num_addresses = 1;
}
function add_address(address addr) public {
bool exists = addresses[addr] > 0;
addresses[addr] = addresses[addr] + 1;
if (!exists) {
emit AddressAdded(addr, ++num_addresses);
}
}
}
```
Disclaimer: this contract has not been audited and is not intended to be modeled or used in production. :)
This contract persists two values in its storage:
1. `num_addresses`: the total number of unique addresses known to the contract.
2. `addresses`: a mapping that records the number of times an address has been added to the contract.
It also emits an event each time a new address is added into the contract's storage.
## Custom Code
In order to monitor the state of this smart contract, we'd need to implement: an event transformer, a mappings namespace, and a repository.
We will go through each of these in turn.
### Event Transformer
Given that the contract's storage includes a mapping, `addresses`, we will need to be able to identify the keys to that mapping that exist in the system so that we can recognize contract storage keys that correspond to non-zero values in that mapping.
The simplest way to be aware of keys used in a contract's mapping is to listen for contract events that emit the keys that are used in its mapping(s).
Since this contract includes an event, `AddressAdded`, that is emitted each time a new address is added to the `addresses` mapping, we will want to listen for those events and cache the addresses that map to non-zero values.
Please see the event transformer README for detailed instructions about developing this code.
In short, it should be feasible to recognize `AddressAdded` events on the blockchain and parse them to keep a record of addresses that have been added to the system.
### Mappings
If we point an ethereum node at a blockchain hosting this contract and our node is equipped to write out storage changes happening on this contract, we will expect such changes to appear each time `add_address` (which modifies the `addresses` mapping) is called.
In order for those changes — which include raw hex versions of storage keys and storage values — to be useful for us, we need to know how to recognize and parse them.
Our mappings file should assist us with both of these tasks: the `Lookup` function should recognize raw storage keys and return known metadata about the storage value.
In order to perform this lookup, the mappings file should maintain its own mapping of known storage keys to the corresponding storage value metadata.
This internal mapping should contain the storage key for `num_addresses` as well as a storage key for each `addresses` key known to be associated with a non-zero value.
#### num_addresses
`num_addresses` is the first variable declared on the contract, and it is a simple (non-array, non-mapping) type.
Therefore, we know that its storage key is `0000000000000000000000000000000000000000000000000000000000000000`.
The storage key for non-array and non-mapping variables is (usually*) the index of the variable on the contract's storage.
If we see a storage diff being emitted from this contract with this storage key, we know that the `num_addresses` variable has been modified.
In this case, we would expect that the call `mappings.Lookup("0000000000000000000000000000000000000000000000000000000000000000")` would return metadata corresponding to the `num_addresses` variable.
This metadata would probably look something like:
```golang
shared.StorageValueMetadata{
Name: "num_addresses",
Keys: nil,
Type: shared.Uint256,
}
```
<sup>*</sup> Occasionally, multiple variables may be packed into one storage slot, which complicates a direct translation of the index of the variable on the contract to its storage key.
#### addresses
`addresses` is the second variable declared on the contract, but it is a mapping.
Since it is a mapping, the storage key is more complex than `0000000000000000000000000000000000000000000000000000000000000001` (which would be the key for the variable if it were not an array or mapping).
Having a single storage slot for an entire mapping would not work, since there can be an arbitrary number of entries in a mapping, and a single storage value slot is constrained to 32 bytes.
The way that smart contract mappings are maintained in storage (in Solidity) is by creating a new storage key/value pair for each entry in the mapping, where the storage key is a hash of the occupied slot's key concatenated with the mapping's index on the contract.
Given an occupied slot's key, `k`, and a mapping's index on the contract, `i`, we can generate the storage key with the following code:
```golang
func GetMappingStorageKey(k, i string) string {
return common.BytesToHash(crypto.Keccak256(common.FromHex(k + i))).Hex()
}
```
If we were to call the contract's `add_address` function with `0xde0B295669a9FD93d5F28D9Ec85E40f4cb697BAe`, we would expect to see an `AddressAdded` event emitted, with `0xde0B295669a9FD93d5F28D9Ec85E40f4cb697BAe` in its payload.
From that event, we would know that there exists in the contract's storage a storage key of:
```golang
GetMappingStorageKey("0xde0B295669a9FD93d5F28D9Ec85E40f4cb697BAe", "0000000000000000000000000000000000000000000000000000000000000001")
```
Executing the above code results in: `0x0f96a1133cfd5b94c329aa0526b5962bd791dbbfc481ca82f7d4a439e1e9bc40`.
Therefore, the first time `add_address` was called for this address, we would also expect to see a storage diff with a key of `0x0f96a1133cfd5b94c329aa0526b5962bd791dbbfc481ca82f7d4a439e1e9bc40` and a value of `0000000000000000000000000000000000000000000000000000000000000001`.
This would be the indication that in contract storage, the address `0xde0B295669a9FD93d5F28D9Ec85E40f4cb697BAe` maps to the value 1.
Given that we knew this address was a key in the mapping from our event transformer, we would expect a call to `mappings.Lookup("0x0f96a1133cfd5b94c329aa0526b5962bd791dbbfc481ca82f7d4a439e1e9bc40")` to return metadata corresponding to _this slot_ in the addresses mapping:
```golang
shared.StorageValueMetadata{
Name: "addresses",
Keys: map[Key]string{Address: "0xde0B295669a9FD93d5F28D9Ec85E40f4cb697BAe"},
Type: shared.Uint256,
}
```
### Repository
Once we have recognized a storage diff, we can decode the storage value to the data's known type.
Since the metadata tells us that the above values are `uint256`, we can decode a value like `0000000000000000000000000000000000000000000000000000000000000001` to `1`.
The purpose of the contract-specific repository is to write that value to the database in a way that makes it useful for future queries.
Typically, this involves writing the block hash, block number, decoded value, and any keys in the metadata to a table.
The current repository interface has a generalized `Create` function that can accept any arbitrary storage row along with its metadata.
This is deliberate, to facilitate shared use of the common storage transformer.
An implication of this decision is that the `Create` function typically includes a `switch` statement that selects which table to write to, as well as what data to include, based on the name of the variable as defined in the metadata.
An example implementation of `Create` for our example contract above might look like:
```golang
func (repository AddressStorageRepository) Create(blockNumber int, blockHash string, metadata shared.StorageValueMetadata, value interface{}) error {
switch metadata.Name {
case "num_addresses":
_, err := repository.db.Exec(`INSERT INTO storage.num_addresses (block_hash, block_number, n) VALUES ($1, $2, $3)`,
blockHash, blockNumber, value)
return err
case "addresses":
_, err := repository.db.Exec(`INSERT INTO storage.addresses (block_hash, block_number, address, n) VALUES ($1, $2, $3, $4)`,
blockHash, blockNumber, metadata.Keys[Address], value)
return err
default:
panic(fmt.Sprintf("unrecognized contract storage name: %s", metadata.Name))
}
}
```
## Summary
With our very simple address storing contract, we would be able to read its storage diffs by implementing an event transformer, a mappings, and a repository.
The mappings would be able to lookup storage keys reflecting `num_addresses` or any slot in `addresses`, using addresses derived from watching the `AddressAdded` event for the latter.
The repository would be able to persist the value of `num_addresses` or any slot in `addresses`, using metadata returned from the mappings.
The mappings and repository could be plugged into the common storage transformer, enabling us to know the contract's state as it is changing.

View File

@ -1,124 +0,0 @@
# Watching Contract Storage
One approach VulcanizeDB takes to caching and indexing smart contracts is to ingest raw contract storage values.
Assuming that you are running an ethereum node that is writing contract storage changes to a CSV file, VulcanizeDB can parse them and persist the results to postgres.
## Assumptions
The current approach for caching smart contract storage diffs assumes that you are running a node that is writing contract storage diffs to a CSV file.
The CSV file is expected to have 5 columns: contract address, block hash, block number, storage key, storage value.
We have [a branch on vulcanize/parity-ethereum](https://github.com/vulcanize/parity-ethereum/tree/watch-storage-diffs) that enables running a node that writes storage diffs this way.
We also have [sample data](https://github.com/8thlight/maker-vulcanizedb/pull/132/files) that comes from running that node against Kovan through block 9796184.
Looking forward, we would like to isolate this assumption as much as possible.
We may end up needing to read CSV data that is formatted differently, or reading data from a non-CSV source, and we do not want resulting changes to cascade throughout the codebase.
## Shared Code
VulcanizeDB has shared code for continuously reading from the CSV file written by the ethereum node and writing a parsed version of each row to postgres.
### Storage Watcher
The storage watcher is responsible for continuously delegating CSV rows to the appropriate transformer as they are being written by the ethereum node.
It maintains a mapping of contract addresses to transformers, and will ignore storage diff rows for contract addresses that do not have a corresponding transformer.
The storage watcher is currently initialized from the `parseStorageDiffs` command, which also adds transformers that the watcher should know about in its mapping of addresses to transformers.
### Storage Transformer
The storage transformer is responsible for converting raw contract storage hex values into useful data and writing them to postgres.
The storage transformer depends on contract-specific implementations of code capable of recognizing storage keys and writing the matching (decoded) storage value to disk.
```golang
func (transformer Transformer) Execute(row shared.StorageDiffRow) error {
metadata, lookupErr := transformer.Mappings.Lookup(row.StorageKey)
if lookupErr != nil {
return lookupErr
}
value, decodeErr := shared.Decode(row, metadata)
if decodeErr != nil {
return decodeErr
}
return transformer.Repository.Create(row.BlockHeight, row.BlockHash.Hex(), metadata, value)
}
```
## Custom Code
In order to watch an additional smart contract, a developer must create three things:
1. Mappings - specify how to identify keys in the contract's storage trie.
1. Repository - specify how to persist a parsed version of the storage value matching the recognized storage key.
1. Instance - create an instance of the storage transformer that uses your mappings and repository.
### Mappings
```golang
type Mappings interface {
Lookup(key common.Hash) (shared.StorageValueMetadata, error)
SetDB(db *postgres.DB)
}
```
A contract-specific implementation of the mappings interface enables the storage transformer to fetch metadata associated with a storage key.
Storage metadata contains: the name of the variable matching the storage key, a raw version of any keys associated with the variable (if the variable is a mapping), and the variable's type.
```golang
type StorageValueMetadata struct {
Name string
Keys map[Key]string
Type ValueType
}
```
Keys are only relevant if the variable is a mapping. For example, in the following Solidity code:
```solidity
pragma solidity ^0.4.0;
contract Contract {
uint x;
mapping(address => uint) y;
}
```
The metadata for variable `x` would not have any associated keys, but the metadata for a storage key associated with `y` would include the address used to specify that key's index in the mapping.
The `SetDB` function is required for the mappings to connect to the database.
A database connection may be desired when keys in a mapping variable need to be read from log events (e.g. to lookup what addresses may exist in `y`, above).
### Repository
```golang
type Repository interface {
Create(blockNumber int, blockHash string, metadata shared.StorageValueMetadata, value interface{}) error
SetDB(db *postgres.DB)
}
```
A contract-specific implementation of the repository interface enables the transformer to write the decoded storage value to the appropriate table in postgres.
The `Create` function is expected to recognize and persist a given storage value by the variable's name, as indicated on the row's metadata.
The `SetDB` function is required for the repository to connect to the database.
### Instance
```golang
type Transformer struct {
Address common.Address
Mappings storage_diffs.Mappings
Repository storage_diffs.Repository
}
```
A new instance of the storage transformer is initialized with the contract-specific mappings and repository, as well as the contract's address.
The contract's address is included so that the watcher can query that value from the transformer in order to build up its mapping of addresses to transformers.
## Summary
To begin watching an additional smart contract, create a new mappings file for looking up storage keys on that contract, a repository for writing storage values from the contract, and initialize a new storage transformer instance with the mappings, repository, and contract address.
The new instance, wrapped in an initializer that calls `SetDB` on the mappings and repository, should be passed to the `AddTransformers` function on the storage watcher.

View File

@ -1,19 +0,0 @@
package flop_kick_test
import (
"github.com/sirupsen/logrus"
"io/ioutil"
"testing"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// TestFlopKick registers the Gomega fail handler and runs the package's
// Ginkgo specs under `go test`.
func TestFlopKick(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "FlopKick Suite")
}

// Discard logrus output for the whole suite so spec reporting stays readable.
var _ = BeforeSuite(func() {
	logrus.SetOutput(ioutil.Discard)
})

View File

@ -1,19 +0,0 @@
package shared_test
import (
"github.com/sirupsen/logrus"
"io/ioutil"
"testing"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// TestShared registers the Gomega fail handler and runs the package's
// Ginkgo specs under `go test`.
func TestShared(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Shared Suite")
}

// Discard logrus output for the whole suite so spec reporting stays readable.
var _ = BeforeSuite(func() {
	logrus.SetOutput(ioutil.Discard)
})

View File

@ -1,35 +0,0 @@
// VulcanizeDB
// Copyright © 2018 Vulcanize
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package maker_test
import (
"github.com/sirupsen/logrus"
"io/ioutil"
"testing"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// TestMaker registers the Gomega fail handler and runs the package's
// Ginkgo specs under `go test`.
func TestMaker(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Maker Suite")
}

// Discard logrus output for the whole suite so spec reporting stays readable.
var _ = BeforeSuite(func() {
	logrus.SetOutput(ioutil.Discard)
})

View File

@ -1,19 +0,0 @@
package pit_test
import (
"github.com/sirupsen/logrus"
"io/ioutil"
"testing"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// TestPit registers the Gomega fail handler and runs the package's
// Ginkgo specs under `go test`.
func TestPit(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Pit Suite")
}

// Discard logrus output for the whole suite so spec reporting stays readable.
var _ = BeforeSuite(func() {
	logrus.SetOutput(ioutil.Discard)
})

View File

@ -1,19 +0,0 @@
package vat_test
import (
"github.com/sirupsen/logrus"
"io/ioutil"
"testing"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// TestVat registers the Gomega fail handler and runs the package's
// Ginkgo specs under `go test`.
func TestVat(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Vat Suite")
}

// Discard logrus output for the whole suite so spec reporting stays readable.
var _ = BeforeSuite(func() {
	logrus.SetOutput(ioutil.Discard)
})

View File

@ -1,152 +0,0 @@
/*
* Copyright 2018 Vulcanize
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package vow
import (
"github.com/ethereum/go-ethereum/common"
"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/transformers/storage_diffs"
"github.com/vulcanize/vulcanizedb/pkg/transformers/storage_diffs/maker"
"github.com/vulcanize/vulcanizedb/pkg/transformers/storage_diffs/shared"
)
const (
	// Names of the Vow contract's storage variables, used as the Name field
	// on StorageValueMetadata. Mixed casing (e.g. "Sin" vs "wait") presumably
	// mirrors the Solidity identifiers — TODO confirm against the contract.
	VowVat  = "vat"
	VowCow  = "cow"
	VowRow  = "row"
	VowSin  = "Sin"
	VowWoe  = "Woe"
	VowAsh  = "Ash"
	VowWait = "wait"
	VowSump = "sump"
	VowBump = "bump"
	VowHump = "hump"
)
var (
	// Storage key and decoding metadata for each single-slot Vow variable.
	// Keys are the fixed slot indices on the contract; none take mapping keys.
	VatKey      = common.HexToHash(storage_diffs.IndexOne)
	VatMetadata = shared.StorageValueMetadata{
		Name: VowVat,
		Keys: nil,
		Type: shared.Address,
	}

	CowKey      = common.HexToHash(storage_diffs.IndexTwo)
	CowMetadata = shared.StorageValueMetadata{
		Name: VowCow,
		Keys: nil,
		Type: shared.Address,
	}

	RowKey      = common.HexToHash(storage_diffs.IndexThree)
	RowMetadata = shared.StorageValueMetadata{
		Name: VowRow,
		Keys: nil,
		Type: shared.Address,
	}

	// NOTE(review): slot four is skipped here (Sin sits at index five) —
	// confirm against the Vow contract's storage layout.
	SinKey      = common.HexToHash(storage_diffs.IndexFive)
	SinMetadata = shared.StorageValueMetadata{
		Name: VowSin,
		Keys: nil,
		Type: shared.Uint256,
	}

	WoeKey      = common.HexToHash(storage_diffs.IndexSix)
	WoeMetadata = shared.StorageValueMetadata{
		Name: VowWoe,
		Keys: nil,
		Type: shared.Uint256,
	}

	AshKey      = common.HexToHash(storage_diffs.IndexSeven)
	AshMetadata = shared.StorageValueMetadata{
		Name: VowAsh,
		Keys: nil,
		Type: shared.Uint256,
	}

	WaitKey      = common.HexToHash(storage_diffs.IndexEight)
	WaitMetadata = shared.StorageValueMetadata{
		Name: VowWait,
		Keys: nil,
		Type: shared.Uint256,
	}

	SumpKey      = common.HexToHash(storage_diffs.IndexNine)
	SumpMetadata = shared.StorageValueMetadata{
		Name: VowSump,
		Keys: nil,
		Type: shared.Uint256,
	}

	BumpKey      = common.HexToHash(storage_diffs.IndexTen)
	BumpMetadata = shared.StorageValueMetadata{
		Name: VowBump,
		Keys: nil,
		Type: shared.Uint256,
	}

	HumpKey      = common.HexToHash(storage_diffs.IndexEleven)
	HumpMetadata = shared.StorageValueMetadata{
		Name: VowHump,
		Keys: nil,
		Type: shared.Uint256,
	}
)
// VowMappings resolves Vow storage keys to value metadata, lazily building
// the static key set on first lookup.
type VowMappings struct {
	StorageRepository maker.IMakerStorageRepository
	// mappings caches storage key -> metadata; populated by loadMappings.
	mappings map[common.Hash]shared.StorageValueMetadata
}
// Lookup returns the metadata for a Vow storage key, loading the static key
// set on the first miss. Unknown keys yield shared.ErrStorageKeyNotFound.
func (mappings *VowMappings) Lookup(key common.Hash) (shared.StorageValueMetadata, error) {
	if metadata, found := mappings.mappings[key]; found {
		return metadata, nil
	}
	if loadErr := mappings.loadMappings(); loadErr != nil {
		return shared.StorageValueMetadata{}, loadErr
	}
	metadata, found := mappings.mappings[key]
	if !found {
		return metadata, shared.ErrStorageKeyNotFound{Key: key.Hex()}
	}
	return metadata, nil
}
// loadMappings populates the cache with the static key -> metadata table for
// every Vow storage variable. It never fails but keeps the error return so
// dynamic (database-backed) keys can be added without changing Lookup.
func (mappings *VowMappings) loadMappings() error {
	mappings.mappings = map[common.Hash]shared.StorageValueMetadata{
		VatKey:  VatMetadata,
		CowKey:  CowMetadata,
		RowKey:  RowMetadata,
		SinKey:  SinMetadata,
		WoeKey:  WoeMetadata,
		AshKey:  AshMetadata,
		WaitKey: WaitMetadata,
		SumpKey: SumpMetadata,
		BumpKey: BumpMetadata,
		HumpKey: HumpMetadata,
	}
	return nil
}
// SetDB forwards the database handle to the backing storage repository so
// any database-dependent key loading can run.
func (mappings *VowMappings) SetDB(db *postgres.DB) {
	mappings.StorageRepository.SetDB(db)
}

View File

@ -1,42 +0,0 @@
package vow_test
import (
"github.com/ethereum/go-ethereum/common"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"github.com/vulcanize/vulcanizedb/pkg/fakes"
"github.com/vulcanize/vulcanizedb/pkg/transformers/storage_diffs/maker/test_helpers"
"github.com/vulcanize/vulcanizedb/pkg/transformers/storage_diffs/maker/vow"
"github.com/vulcanize/vulcanizedb/pkg/transformers/storage_diffs/shared"
)
// Specs covering VowMappings.Lookup for both the known static keys and the
// not-found error path.
var _ = Describe("Vow storage mappings", func() {
	Describe("looking up static keys", func() {
		It("returns value metadata if key exists", func() {
			storageRepository := &test_helpers.MockMakerStorageRepository{}
			mappings := vow.VowMappings{StorageRepository: storageRepository}
			Expect(mappings.Lookup(vow.VatKey)).To(Equal(vow.VatMetadata))
			Expect(mappings.Lookup(vow.CowKey)).To(Equal(vow.CowMetadata))
			Expect(mappings.Lookup(vow.RowKey)).To(Equal(vow.RowMetadata))
			Expect(mappings.Lookup(vow.SinKey)).To(Equal(vow.SinMetadata))
			Expect(mappings.Lookup(vow.WoeKey)).To(Equal(vow.WoeMetadata))
			Expect(mappings.Lookup(vow.AshKey)).To(Equal(vow.AshMetadata))
			Expect(mappings.Lookup(vow.WaitKey)).To(Equal(vow.WaitMetadata))
			Expect(mappings.Lookup(vow.SumpKey)).To(Equal(vow.SumpMetadata))
			Expect(mappings.Lookup(vow.BumpKey)).To(Equal(vow.BumpMetadata))
			Expect(mappings.Lookup(vow.HumpKey)).To(Equal(vow.HumpMetadata))
		})
		It("returns error if key does not exist", func() {
			storageRepository := &test_helpers.MockMakerStorageRepository{}
			mappings := vow.VowMappings{StorageRepository: storageRepository}
			_, err := mappings.Lookup(common.HexToHash(fakes.FakeHash.Hex()))
			Expect(err).To(HaveOccurred())
			Expect(err).To(MatchError(shared.ErrStorageKeyNotFound{Key: fakes.FakeHash.Hex()}))
		})
	})
})

View File

@ -1,117 +0,0 @@
/*
* Copyright 2018 Vulcanize
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package vow
import (
	"fmt"

	"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres"
	"github.com/vulcanize/vulcanizedb/pkg/transformers/storage_diffs/shared"
)
// VowStorageRepository persists decoded Vow contract storage values to the
// maker schema in postgres.
type VowStorageRepository struct {
	db *postgres.DB
}

// SetDB wires the repository to a database connection; Create dereferences
// the handle, so this must run before any writes.
func (repository *VowStorageRepository) SetDB(db *postgres.DB) {
	repository.db = db
}
// Create persists a decoded Vow storage value, routing on the variable name
// carried in the row's metadata. Every Vow variable is stored as a string
// here. An unrecognized metadata name or a non-string value is reported as a
// returned error rather than a panic, so callers can skip bad diffs instead
// of crashing the watcher.
func (repository VowStorageRepository) Create(blockNumber int, blockHash string, metadata shared.StorageValueMetadata, value interface{}) error {
	str, ok := value.(string)
	if !ok {
		return fmt.Errorf("unexpected value type for vow storage variable %q: %T", metadata.Name, value)
	}
	switch metadata.Name {
	case VowVat:
		return repository.insertVowVat(blockNumber, blockHash, str)
	case VowCow:
		return repository.insertVowCow(blockNumber, blockHash, str)
	case VowRow:
		return repository.insertVowRow(blockNumber, blockHash, str)
	case VowSin:
		return repository.insertVowSin(blockNumber, blockHash, str)
	case VowWoe:
		return repository.insertVowWoe(blockNumber, blockHash, str)
	case VowAsh:
		return repository.insertVowAsh(blockNumber, blockHash, str)
	case VowWait:
		return repository.insertVowWait(blockNumber, blockHash, str)
	case VowSump:
		return repository.insertVowSump(blockNumber, blockHash, str)
	case VowBump:
		return repository.insertVowBump(blockNumber, blockHash, str)
	case VowHump:
		return repository.insertVowHump(blockNumber, blockHash, str)
	default:
		return fmt.Errorf("unrecognized vow storage metadata name: %s", metadata.Name)
	}
}
// The insert helpers below each record one Vow storage value against the
// block number and hash at which the storage diff was observed.

func (repository VowStorageRepository) insertVowVat(blockNumber int, blockHash string, vat string) error {
	_, err := repository.db.Exec(`INSERT INTO maker.vow_vat (block_number, block_hash, vat) VALUES ($1, $2, $3)`, blockNumber, blockHash, vat)
	return err
}

func (repository VowStorageRepository) insertVowCow(blockNumber int, blockHash string, cow string) error {
	_, err := repository.db.Exec(`INSERT INTO maker.vow_cow (block_number, block_hash, cow) VALUES ($1, $2, $3)`, blockNumber, blockHash, cow)
	return err
}

// NOTE(review): "row" is a reserved word in some SQL dialects — confirm the
// maker.vow_row migration creates this column without quoting issues.
func (repository VowStorageRepository) insertVowRow(blockNumber int, blockHash string, row string) error {
	_, err := repository.db.Exec(`INSERT INTO maker.vow_row (block_number, block_hash, row) VALUES ($1, $2, $3)`, blockNumber, blockHash, row)
	return err
}

func (repository VowStorageRepository) insertVowSin(blockNumber int, blockHash string, sin string) error {
	_, err := repository.db.Exec(`INSERT INTO maker.vow_sin (block_number, block_hash, sin) VALUES ($1, $2, $3)`, blockNumber, blockHash, sin)
	return err
}

func (repository VowStorageRepository) insertVowWoe(blockNumber int, blockHash string, woe string) error {
	_, err := repository.db.Exec(`INSERT INTO maker.vow_woe (block_number, block_hash, woe) VALUES ($1, $2, $3)`, blockNumber, blockHash, woe)
	return err
}

func (repository VowStorageRepository) insertVowAsh(blockNumber int, blockHash string, ash string) error {
	_, err := repository.db.Exec(`INSERT INTO maker.vow_ash (block_number, block_hash, ash) VALUES ($1, $2, $3)`, blockNumber, blockHash, ash)
	return err
}

func (repository VowStorageRepository) insertVowWait(blockNumber int, blockHash string, wait string) error {
	_, err := repository.db.Exec(`INSERT INTO maker.vow_wait (block_number, block_hash, wait) VALUES ($1, $2, $3)`, blockNumber, blockHash, wait)
	return err
}

func (repository VowStorageRepository) insertVowSump(blockNumber int, blockHash string, sump string) error {
	_, err := repository.db.Exec(`INSERT INTO maker.vow_sump (block_number, block_hash, sump) VALUES ($1, $2, $3)`, blockNumber, blockHash, sump)
	return err
}

func (repository VowStorageRepository) insertVowBump(blockNumber int, blockHash string, bump string) error {
	_, err := repository.db.Exec(`INSERT INTO maker.vow_bump (block_number, block_hash, bump) VALUES ($1, $2, $3)`, blockNumber, blockHash, bump)
	return err
}

func (repository VowStorageRepository) insertVowHump(blockNumber int, blockHash string, hump string) error {
	_, err := repository.db.Exec(`INSERT INTO maker.vow_hump (block_number, block_hash, hump) VALUES ($1, $2, $3)`, blockNumber, blockHash, hump)
	return err
}

View File

@ -1,13 +0,0 @@
package vow_test
import (
"testing"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// TestVow registers the Gomega fail handler and runs the package's
// Ginkgo specs under `go test`.
func TestVow(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Storage Diff Vow Suite")
}

View File

@ -1,63 +0,0 @@
// VulcanizeDB
// Copyright © 2018 Vulcanize
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package storage_diffs
import (
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/crypto"
"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres"
"github.com/vulcanize/vulcanizedb/pkg/transformers/storage_diffs/shared"
"math/big"
)
// Mappings resolves a contract storage key to the metadata describing its
// value. SetDB supplies a database connection for implementations that need
// to load mapping keys (e.g. addresses seen in log events).
type Mappings interface {
	Lookup(key common.Hash) (shared.StorageValueMetadata, error)
	SetDB(db *postgres.DB)
}
// Hex-encoded 32-byte storage slot indices, fed to common.HexToHash when
// computing storage keys. IndexEleven previously contained a trailing space
// inside the literal, which corrupted the parsed hash; it is removed here.
// NOTE(review): IndexTen/IndexEleven end in "10"/"11", which parse as hex
// 0x10/0x11 (decimal 16/17) rather than slots ten and eleven — confirm
// against the contracts' storage layouts before relying on them.
const (
	IndexZero   = "0000000000000000000000000000000000000000000000000000000000000000"
	IndexOne    = "0000000000000000000000000000000000000000000000000000000000000001"
	IndexTwo    = "0000000000000000000000000000000000000000000000000000000000000002"
	IndexThree  = "0000000000000000000000000000000000000000000000000000000000000003"
	IndexFour   = "0000000000000000000000000000000000000000000000000000000000000004"
	IndexFive   = "0000000000000000000000000000000000000000000000000000000000000005"
	IndexSix    = "0000000000000000000000000000000000000000000000000000000000000006"
	IndexSeven  = "0000000000000000000000000000000000000000000000000000000000000007"
	IndexEight  = "0000000000000000000000000000000000000000000000000000000000000008"
	IndexNine   = "0000000000000000000000000000000000000000000000000000000000000009"
	IndexTen    = "0000000000000000000000000000000000000000000000000000000000000010"
	IndexEleven = "0000000000000000000000000000000000000000000000000000000000000011"
)
// GetMapping returns the storage key for entry `key` in the mapping stored
// at slot `indexOnContract`: keccak256(key ++ slot), per Solidity's storage
// layout for mappings. Both arguments are hex strings without a 0x prefix.
func GetMapping(indexOnContract, key string) common.Hash {
	packed := common.FromHex("0x" + key + indexOnContract)
	return common.BytesToHash(crypto.Keccak256(packed))
}
// GetNestedMapping returns the storage key for a mapping-of-mappings entry:
// the inner key hashed against the hash of the outer key and the slot index,
// per Solidity's nested-mapping storage layout.
func GetNestedMapping(indexOnContract, primaryKey, secondaryKey string) common.Hash {
	outerSlot := crypto.Keccak256(common.FromHex(primaryKey + indexOnContract))
	innerSlot := crypto.Keccak256(common.FromHex(secondaryKey), outerSlot)
	return common.BytesToHash(innerSlot)
}
// GetIncrementedKey offsets a storage key by incrementBy slots, used to
// reach the later members of a struct stored starting at `original`.
func GetIncrementedKey(original common.Hash, incrementBy int64) common.Hash {
	sum := new(big.Int).Add(original.Big(), big.NewInt(incrementBy))
	return common.BytesToHash(sum.Bytes())
}

View File

@ -1,43 +0,0 @@
// This should be the output from running composeAndExecute with compose.toml
package main
import (
cat_chop_lump "github.com/vulcanize/mcd_transformers/transformers/cat_file/chop_lump"
bite "github.com/vulcanize/mcd_transformers/transformers/bite"
cat_flip "github.com/vulcanize/mcd_transformers/transformers/cat_file/flip"
cat_pit_vow "github.com/vulcanize/mcd_transformers/transformers/cat_file/pit_vow"
deal "github.com/vulcanize/mcd_transformers/transformers/deal"
dent "github.com/vulcanize/mcd_transformers/transformers/dent"
drip_drip "github.com/vulcanize/mcd_transformers/transformers/drip_drip"
drip_file_ilk "github.com/vulcanize/mcd_transformers/transformers/drip_file/ilk"
drip_file_repo "github.com/vulcanize/mcd_transformers/transformers/drip_file/repo"
drip_file_vow "github.com/vulcanize/mcd_transformers/transformers/drip_file/vow"
flap_kick "github.com/vulcanize/mcd_transformers/transformers/flap_kick"
flip_kick "github.com/vulcanize/mcd_transformers/transformers/flip_kick"
flop_kick "github.com/vulcanize/mcd_transformers/transformers/flop_kick"
frob "github.com/vulcanize/mcd_transformers/transformers/frob"
pit_file_debt_ceiling "github.com/vulcanize/mcd_transformers/transformers/pit_file/debt_ceiling"
pit_file_ilk "github.com/vulcanize/mcd_transformers/transformers/pit_file/ilk"
price_feeds "github.com/vulcanize/mcd_transformers/transformers/price_feeds"
tend "github.com/vulcanize/mcd_transformers/transformers/tend"
vat_flux "github.com/vulcanize/mcd_transformers/transformers/vat_flux"
vat_fold "github.com/vulcanize/mcd_transformers/transformers/vat_fold"
vat_grab "github.com/vulcanize/mcd_transformers/transformers/vat_grab"
vat_heal "github.com/vulcanize/mcd_transformers/transformers/vat_heal"
vat_init "github.com/vulcanize/mcd_transformers/transformers/vat_init"
vat_move "github.com/vulcanize/mcd_transformers/transformers/vat_move"
vat_slip "github.com/vulcanize/mcd_transformers/transformers/vat_slip"
vat_toll "github.com/vulcanize/mcd_transformers/transformers/vat_toll"
vat_tune "github.com/vulcanize/mcd_transformers/transformers/vat_tune"
vow_flog "github.com/vulcanize/mcd_transformers/transformers/vow_flog"
"github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
)
// exporter implements the plugin's Export contract.
type exporter string

// Exporter is the symbol the plugin loader looks up after loading this
// generated plugin.
var Exporter exporter

// Export returns the initializer for every transformer compiled into this
// plugin; the caller executes them to construct the transformer set.
func (e exporter) Export() []transformer.TransformerInitializer {
	return []transformer.TransformerInitializer{deal.TransformerInitializer, cat_chop_lump.TransformerInitializer, vat_slip.TransformerInitializer, bite.TransformerInitializer, vat_heal.TransformerInitializer, vat_toll.TransformerInitializer, price_feeds.TransformerInitializer, vat_init.TransformerInitializer, cat_pit_vow.TransformerInitializer, drip_drip.TransformerInitializer, vat_grab.TransformerInitializer, tend.TransformerInitializer, pit_file_ilk.TransformerInitializer, vat_fold.TransformerInitializer, vat_tune.TransformerInitializer, dent.TransformerInitializer, vow_flog.TransformerInitializer, flip_kick.TransformerInitializer, vat_flux.TransformerInitializer, frob.TransformerInitializer, flap_kick.TransformerInitializer, drip_file_repo.TransformerInitializer, flop_kick.TransformerInitializer, vat_move.TransformerInitializer, cat_flip.TransformerInitializer, drip_file_ilk.TransformerInitializer, drip_file_vow.TransformerInitializer, pit_file_debt_ceiling.TransformerInitializer}
}

View File

@ -99,51 +99,6 @@ func CleanTestDB(db *postgres.DB) {
db.MustExec("DELETE FROM checked_headers") db.MustExec("DELETE FROM checked_headers")
db.MustExec("DELETE FROM log_filters") db.MustExec("DELETE FROM log_filters")
db.MustExec("DELETE FROM logs") db.MustExec("DELETE FROM logs")
db.MustExec("DELETE FROM maker.bite")
db.MustExec("DELETE FROM maker.cat_file_chop_lump")
db.MustExec("DELETE FROM maker.cat_file_flip")
db.MustExec("DELETE FROM maker.cat_file_pit_vow")
db.MustExec("DELETE FROM maker.deal")
db.MustExec("DELETE FROM maker.dent")
db.MustExec("DELETE FROM maker.drip_drip")
db.MustExec("DELETE FROM maker.drip_file_ilk")
db.MustExec("DELETE FROM maker.drip_file_repo")
db.MustExec("DELETE FROM maker.drip_file_vow")
db.MustExec("DELETE FROM maker.flap_kick")
db.MustExec("DELETE FROM maker.flip_kick")
db.MustExec("DELETE FROM maker.flop_kick")
db.MustExec("DELETE FROM maker.frob")
db.MustExec("DELETE FROM maker.pit_drip")
db.MustExec("DELETE FROM maker.pit_file_debt_ceiling")
db.MustExec("DELETE FROM maker.pit_file_ilk")
db.MustExec("DELETE FROM maker.pit_ilk_line")
db.MustExec("DELETE FROM maker.pit_ilk_spot")
db.MustExec("DELETE FROM maker.pit_line")
db.MustExec("DELETE FROM maker.pit_live")
db.MustExec("DELETE FROM maker.pit_vat")
db.MustExec("DELETE FROM maker.price_feeds")
db.MustExec("DELETE FROM maker.tend")
db.MustExec("DELETE FROM maker.vat_dai")
db.MustExec("DELETE FROM maker.vat_debt")
db.MustExec("DELETE FROM maker.vat_flux")
db.MustExec("DELETE FROM maker.vat_fold")
db.MustExec("DELETE FROM maker.vat_gem")
db.MustExec("DELETE FROM maker.vat_grab")
db.MustExec("DELETE FROM maker.vat_heal")
db.MustExec("DELETE FROM maker.vat_ilk_art")
db.MustExec("DELETE FROM maker.vat_ilk_ink")
db.MustExec("DELETE FROM maker.vat_ilk_rate")
db.MustExec("DELETE FROM maker.vat_ilk_take")
db.MustExec("DELETE FROM maker.vat_init")
db.MustExec("DELETE FROM maker.vat_move")
db.MustExec("DELETE FROM maker.vat_sin")
db.MustExec("DELETE FROM maker.vat_slip")
db.MustExec("DELETE FROM maker.vat_toll")
db.MustExec("DELETE FROM maker.vat_tune")
db.MustExec("DELETE FROM maker.vat_urn_art")
db.MustExec("DELETE FROM maker.vat_urn_ink")
db.MustExec("DELETE FROM maker.vat_vice")
db.MustExec("DELETE FROM maker.vow_flog")
db.MustExec("DELETE FROM receipts") db.MustExec("DELETE FROM receipts")
db.MustExec("DELETE FROM transactions") db.MustExec("DELETE FROM transactions")
db.MustExec("DELETE FROM watched_contracts") db.MustExec("DELETE FROM watched_contracts")