From 35c5808a07e588198bb0b3dc686e3a9ec437b538 Mon Sep 17 00:00:00 2001
From: Ian Norden
Date: Fri, 8 Mar 2019 10:38:35 -0600
Subject: [PATCH] readme fixes

---
 README.md                            |  4 +-
 libraries/shared/factories/README.md | 98 +++++++++++++---------------
 2 files changed, 48 insertions(+), 54 deletions(-)

diff --git a/README.md b/README.md
index 193ae7cd..f475c0df 100644
--- a/README.md
+++ b/README.md
@@ -359,8 +359,10 @@ or [StorageTransformerInitializer](https://github.com/vulcanize/maker-vulcanized
 * Design the transformers to work in the context of their [event](https://github.com/vulcanize/maker-vulcanizedb/blob/staging/libraries/shared/watcher/event_watcher.go#L83)
 or [storage](https://github.com/vulcanize/maker-vulcanizedb/blob/staging/libraries/shared/watcher/storage_watcher.go#L58) watcher execution modes
 * Create db migrations to run against vulcanizeDB so that we can store the transformer output
+    * Do not `goose fix` the transformer migrations
 * Specify migration locations for each transformer in the config with the `exporter.transformer.migrations` fields
-    * Do not `goose fix` the transformer migrations
+    * If the base vDB migrations occupy this path as well, they need to be in their `goose fix`ed form
+    as they are [here](https://github.com/vulcanize/vulcanizedb/tree/master/db/migrations)
 
 To update a plugin repository with changes to the core vulcanizedb repository, replace the vulcanizedb vendored in the plugin repo
 (`plugin_repo/vendor/github.com/vulcanize/vulcanizedb`) with the newly updated version
diff --git a/libraries/shared/factories/README.md b/libraries/shared/factories/README.md
index e8adc3dc..87fc9162 100644
--- a/libraries/shared/factories/README.md
+++ b/libraries/shared/factories/README.md
@@ -11,7 +11,7 @@ with our `checked_headers` table.
 This approach assumes you are running a vDB light sync which is itself run against a light Ethereum node,
 this approach also assumes there is a full node available.
 
-Looking forward, we will be building fetchers than enable sourcing data from IPFS instead of an ETH node.
+Looking forward, we will be building fetchers that enable sourcing data from IPFS instead of an ETH node.
 
 ## Shared Code
 
@@ -25,7 +25,7 @@ Using the `compose` or `composeAndExecute` command, event watchers can be loaded
 
 ### [Event Transformer](https://github.com/vulcanize/maker-vulcanizedb/blob/staging/libraries/shared/transformer/event_transformer.go)
 
-The event transformer is responsible for converting event logs into more useful data objects and storing them in postgres.
+The event transformer is responsible for converting event logs into more useful data objects and storing them in Postgres.
 The event transformer is composed of converter and repository interfaces and a config struct:
 ```go
 type Transformer struct {
@@ -83,11 +83,11 @@
 In order to watch events at a smart contract, for those events the developer must create:
 1. Config - struct to hold configuration information (contract address, starting block, event name and signature).
 1. Entity - struct to unpack the event log into.
-1. Model - struct representing the final data model we want to write to Postres.
+1. Model - struct representing the final data model we want to write to Postgres.
 1. Converter - an interface which can unpack event logs into our entities and convert those entities to our models.
 1. Repository - an interface to write our models to Postgres.
-1. TransformerInitializer - a public variable which exports our configured transformer to be loaded as part of a plugin,
-1. DB migrations - migrations to generate the Posstgres schema, tables, views, function, etc that are needed to store and interface with the transformed data models.
+1. TransformerInitializer - a public variable which exports our configured transformer to be loaded as part of a plugin.
+1. DB migrations - migrations to generate the Postgres schema, tables, views, functions, etc. that are needed to store and interface with the transformed data models.
 
 The example event we will use looks like:
 ```
@@ -97,7 +97,7 @@ event ExampleEvent(bytes32 indexed arg1, address indexed arg2, bytes32 arg3, uin
 ### Config
 
 The config holds configuration variables for the event transformer, including a name for the transformer, the contract address
-it is working at, the contract's abi, the topic (e.g. event signature; topic0) that it is filtering for, and starting
+it is working at, the contract's ABI, the topic (e.g. event signature; topic0) that it is filtering for, and starting
 and ending block numbers.
 
 ```go
@@ -118,9 +118,9 @@ TransactionIndex, and the Raw log are retained in order to link the data to it's
 
 ```go
 type ExampleEntity struct {
-	Arg1             [32]byte
+	Arg1             common.Hash
 	Arg2             common.Address
-	Arg3             [32]byte
+	Arg3             common.Hash
 	Arg4             *big.Int
 	Arg5             *big.Int
 	LogIndex         uint
@@ -195,40 +195,43 @@ func (ExampleConverter) ToEntities(contractAbi string, ethLogs []types.Log) ([]i
 func (converter ExampleConverter) ToModels(entities []interface{}) ([]interface{}, error) {
 	var models []interface{}
 	for _, entity := range entities {
-        entity, ok := entity.(ExampleModel)
-        if !ok {
-            return nil, fmt.Errorf("entity of type %T, not %T", entity, ExampleModel{})
-        }
-
-        fractionSkimmed, err := hexutil.DecodeBig(entity.Arg3)
+		entity, ok := entity.(ExampleEntity)
+		if !ok {
+			return nil, fmt.Errorf("entity of type %T, not %T", entity, ExampleEntity{})
+		}
+
+		fractionSkimmed, err := hexutil.DecodeBig(entity.Arg3.Hex())
+		if err != nil {
+			return nil, err
+		}
 		position := new(big.Int)
 		position.Sub(entity.Arg4, entity.Arg5)
-        finalPosition := new(big.Int)
-        if preTaxPosition.Sign() < 0 {
-            finalPosition = position
-        } else {
-            skim := new(big.Int)
-            skim.Div(position, fractionSkimmed)
-            finalPosition = position.Sub(position, skim)
-        }
-
-        rawLog, err := json.Marshal(entity.Raw)
-        if err != nil {
-            return nil, err
-        }
-
-        model := ExampleModel{
-            EventHash: common.BytesToHash([]byte(entity.Arg1[:])).Hex(),
+		finalPosition := new(big.Int)
+		if position.Sign() < 0 {
+			finalPosition = position
+		} else {
+			skim := new(big.Int)
+			skim.Div(position, fractionSkimmed)
+			finalPosition = position.Sub(position, skim)
+		}
+
+		rawLog, err := json.Marshal(entity.Raw)
+		if err != nil {
+			return nil, err
+		}
+
+		model := ExampleModel{
+			EventHash:        entity.Arg1.Hex(),
 			UserAddress:      entity.Arg2.Hex(),
 			FractionSkimmed:  fractionSkimmed.String(),
 			Surplus:          entity.Arg4.String(),
 			Deficit:          entity.Arg5.String(),
 			FinalPosition:    finalPosition,
-            LogIndex: entity.LogIndex,
-            TransactionIndex: entity.TransactionIndex,
-            Raw: rawLog,
-        }
-        models = append(models, model)
+			LogIndex:         entity.LogIndex,
+			TransactionIndex: entity.TransactionIndex,
+			Raw:              rawLog,
+		}
+		models = append(models, model)
 	}
 	return models, nil
 }
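+
+// For context, this sketch shows how the shared factory ties the pieces above together
+// at execution time. It is an illustration, not verbatim library code: it assumes the
+// Transformer struct shown earlier (Config, Converter, Repository) and a core.Header
+// carrying the id of the header the logs were fetched at.
+func (transformer Transformer) Execute(logs []types.Log, header core.Header) error {
+	// unpack the raw logs into entities using the contract ABI from the config
+	entities, entityErr := transformer.Converter.ToEntities(transformer.Config.ContractAbi, logs)
+	if entityErr != nil {
+		return entityErr
+	}
+	// convert the entities into the models we want to persist
+	models, modelErr := transformer.Converter.ToModels(entities)
+	if modelErr != nil {
+		return modelErr
+	}
+	// write the models to Postgres, linked to the header they were found at
+	return transformer.Repository.Create(header.Id, models)
+}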
@@ -274,16 +277,6 @@ func (repository ExampleRepository) Create(headerID int64, models []interface{})
 			}
 			return fmt.Errorf("model of type %T, not %T", model, ExampleModel{})
 		}
-
-		EventHash: common.BytesToHash([]byte(entity.Arg1[:])).Hex(),
-		UserAddress: entity.Arg2.Hex(),
-		FractionSkimmed: fractionSkimmed.String(),
-		Surplus: entity.Arg4.String(),
-		Deficit: entity.Arg5.String(),
-		FinalPosition: finalPosition,
-		LogIndex: entity.LogIndex,
-		TransactionIndex: entity.TransactionIndex,
-		Raw: rawLog,
 
 		_, execErr := tx.Exec(
 			`INSERT into example_schema.example_event (header_id, event_hash, user_address, fraction_skimmed, surplus, deficit, final_position, log_idx, tx_idx, raw_log)
@@ -363,10 +356,10 @@ CREATE TABLE example_schema.example_event (
   header_id        INTEGER NOT NULL REFERENCES headers (id) ON DELETE CASCADE,
   event_hash       CHARACTER VARYING(66) NOT NULL,
   user_address     CHARACTER VARYING(66) NOT NULL,
-  fraction_skimmed NUMERIC,
-  surplus          NUMERIC,
-  deficit          NUMERIC,
-  final_position   NUMERIC,
+  fraction_skimmed NUMERIC NOT NULL,
+  surplus          NUMERIC NOT NULL,
+  deficit          NUMERIC NOT NULL,
+  final_position   NUMERIC NOT NULL,
   tx_idx           INTEGER NOT NUll,
   log_idx          INTEGER NOT NUll,
   raw_log          JSONB,
@@ -389,8 +382,7 @@ of which headers we have already filtered through for this event.
 
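+For illustration, a minimal sketch of what such a marking function might look like, assuming the repository holds a db
+handle and `checked_headers` has one boolean column per event (the `example_event_checked` column name is hypothetical):
+
+```go
+func (repository ExampleRepository) MarkHeaderChecked(headerID int64) error {
+	// upsert so that marking an already-seen header is idempotent
+	_, err := repository.db.Exec(`INSERT INTO public.checked_headers (header_id, example_event_checked)
+		VALUES ($1, true)
+		ON CONFLICT (header_id) DO
+			UPDATE SET example_event_checked = true`, headerID)
+	return err
+}
+```
+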
 ## Summary
 
-To create a transformer for a contract event, we need to create entities to unpack the raw log into, models to represent
+To create a transformer for a contract event, we need to create entities for unpacking the raw log, models to represent
 the final data structure, a converter to mediate this unpacking and conversion between entities to models, a repository to write
-these models to Postres, db migrations to accommodate these models in Postgres, and a TransformerInitializer to export the
-configured transformer and load it as a plugin to the `compose` or `composeAndExecute` commands. Note that these commands also
-need a .toml config file, as described in the [main readme](https://github.com/vulcanize/maker-vulcanizedb/blob/staging/README.md#composeandexecute-configuration)
+these models to Postgres, db migrations to accommodate these models in Postgres, and a TransformerInitializer to export the
+configured transformer and load it as a plugin to the `compose` or `composeAndExecute` commands as described in the [main readme](https://github.com/vulcanize/maker-vulcanizedb/blob/staging/README.md#composeandexecute-configuration).
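+
+As a rough sketch (not verbatim from the shared code), the exported TransformerInitializer for this example could look
+something like the following, assuming the converter and repository above and a config struct like the one described in
+the Config section; the type names, variable names, and placeholder values here are illustrative:
+
+```go
+var exampleEventConfig = transformer.EventTransformerConfig{
+	TransformerName:     "example_event",
+	ContractAddresses:   []string{"0x..."}, // placeholder contract address
+	ContractAbi:         "...",             // placeholder contract ABI JSON
+	Topic:               "0x...",           // placeholder topic0 (keccak256 hash of the event signature)
+	StartingBlockNumber: 0,
+	EndingBlockNumber:   -1,
+}
+
+var ExampleTransformerInitializer transformer.EventTransformerInitializer = factories.Transformer{
+	Config:     exampleEventConfig,
+	Converter:  ExampleConverter{},
+	Repository: &ExampleRepository{},
+}.NewTransformer
+```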