From 3693ed905fd5b4a62a63d0bc51c130460188426d Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Thu, 18 Jul 2019 16:24:25 -0500 Subject: [PATCH 01/21] Rename logs to full sync logs - Enable creating new table for logs used in event watching based on header sync --- ... => 00007_create_full_sync_logs_table.sql} | 6 +- .../00016_add_receipts_fk_to_logs.sql | 16 +-- .../00018_create_watched_event_logs.sql | 28 ++--- db/schema.sql | 114 +++++++++--------- .../full/retriever/block_retriever.go | 2 +- .../shared/helpers/test_helpers/database.go | 6 +- .../shared/repository/event_repository.go | 2 +- .../postgres/repositories/block_repository.go | 2 +- .../full_sync_receipt_repository.go | 2 +- .../postgres/repositories/logs_repository.go | 4 +- test_config/test_config.go | 12 +- 11 files changed, 97 insertions(+), 97 deletions(-) rename db/migrations/{00007_create_logs_table.sql => 00007_create_full_sync_logs_table.sql} (72%) diff --git a/db/migrations/00007_create_logs_table.sql b/db/migrations/00007_create_full_sync_logs_table.sql similarity index 72% rename from db/migrations/00007_create_logs_table.sql rename to db/migrations/00007_create_full_sync_logs_table.sql index f85d35bd..985bb3cc 100644 --- a/db/migrations/00007_create_logs_table.sql +++ b/db/migrations/00007_create_full_sync_logs_table.sql @@ -1,5 +1,5 @@ -- +goose Up -CREATE TABLE logs ( +CREATE TABLE full_sync_logs ( id SERIAL PRIMARY KEY, block_number BIGINT, address VARCHAR(66), @@ -10,10 +10,10 @@ CREATE TABLE logs ( topic2 VARCHAR(66), topic3 VARCHAR(66), data TEXT, - CONSTRAINT log_uc UNIQUE (block_number, index) + CONSTRAINT full_sync_log_uc UNIQUE (block_number, index) ); -- +goose Down -DROP TABLE logs; +DROP TABLE full_sync_logs; diff --git a/db/migrations/00016_add_receipts_fk_to_logs.sql b/db/migrations/00016_add_receipts_fk_to_logs.sql index 9301e545..336f911e 100644 --- a/db/migrations/00016_add_receipts_fk_to_logs.sql +++ b/db/migrations/00016_add_receipts_fk_to_logs.sql @@ -1,11 +1,11 @@ -- 
+goose Up -ALTER TABLE logs - DROP CONSTRAINT log_uc; +ALTER TABLE full_sync_logs + DROP CONSTRAINT full_sync_log_uc; -ALTER TABLE logs +ALTER TABLE full_sync_logs ADD COLUMN receipt_id INT; -ALTER TABLE logs +ALTER TABLE full_sync_logs ADD CONSTRAINT receipts_fk FOREIGN KEY (receipt_id) REFERENCES full_sync_receipts (id) @@ -13,11 +13,11 @@ ON DELETE CASCADE; -- +goose Down -ALTER TABLE logs +ALTER TABLE full_sync_logs DROP CONSTRAINT receipts_fk; -ALTER TABLE logs +ALTER TABLE full_sync_logs DROP COLUMN receipt_id; -ALTER TABLE logs - ADD CONSTRAINT log_uc UNIQUE (block_number, index); +ALTER TABLE full_sync_logs + ADD CONSTRAINT full_sync_log_uc UNIQUE (block_number, index); diff --git a/db/migrations/00018_create_watched_event_logs.sql b/db/migrations/00018_create_watched_event_logs.sql index ae17ee08..cdc1bffe 100644 --- a/db/migrations/00018_create_watched_event_logs.sql +++ b/db/migrations/00018_create_watched_event_logs.sql @@ -3,31 +3,31 @@ CREATE VIEW block_stats AS SELECT max(block_number) AS max_block, min(block_number) AS min_block - FROM logs; + FROM full_sync_logs; CREATE VIEW watched_event_logs AS SELECT log_filters.name, - logs.id, + full_sync_logs.id, block_number, - logs.address, + full_sync_logs.address, tx_hash, index, - logs.topic0, - logs.topic1, - logs.topic2, - logs.topic3, + full_sync_logs.topic0, + full_sync_logs.topic1, + full_sync_logs.topic2, + full_sync_logs.topic3, data, receipt_id FROM log_filters CROSS JOIN block_stats - JOIN logs ON logs.address = log_filters.address - AND logs.block_number >= coalesce(log_filters.from_block, block_stats.min_block) - AND logs.block_number <= coalesce(log_filters.to_block, block_stats.max_block) - WHERE (log_filters.topic0 = logs.topic0 OR log_filters.topic0 ISNULL) - AND (log_filters.topic1 = logs.topic1 OR log_filters.topic1 ISNULL) - AND (log_filters.topic2 = logs.topic2 OR log_filters.topic2 ISNULL) - AND (log_filters.topic3 = logs.topic3 OR log_filters.topic3 ISNULL); + JOIN full_sync_logs ON 
full_sync_logs.address = log_filters.address + AND full_sync_logs.block_number >= coalesce(log_filters.from_block, block_stats.min_block) + AND full_sync_logs.block_number <= coalesce(log_filters.to_block, block_stats.max_block) + WHERE (log_filters.topic0 = full_sync_logs.topic0 OR log_filters.topic0 ISNULL) + AND (log_filters.topic1 = full_sync_logs.topic1 OR log_filters.topic1 ISNULL) + AND (log_filters.topic2 = full_sync_logs.topic2 OR log_filters.topic2 ISNULL) + AND (log_filters.topic3 = full_sync_logs.topic3 OR log_filters.topic3 ISNULL); -- +goose Down DROP VIEW watched_event_logs; diff --git a/db/schema.sql b/db/schema.sql index 5470a107..d58aa9a2 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -2,8 +2,8 @@ -- PostgreSQL database dump -- --- Dumped from database version 11.3 --- Dumped by pg_dump version 11.3 +-- Dumped from database version 11.4 +-- Dumped by pg_dump version 11.4 SET statement_timeout = 0; SET lock_timeout = 0; @@ -51,10 +51,10 @@ ALTER SEQUENCE public.addresses_id_seq OWNED BY public.addresses.id; -- --- Name: logs; Type: TABLE; Schema: public; Owner: - +-- Name: full_sync_logs; Type: TABLE; Schema: public; Owner: - -- -CREATE TABLE public.logs ( +CREATE TABLE public.full_sync_logs ( id integer NOT NULL, block_number bigint, address character varying(66), @@ -74,9 +74,9 @@ CREATE TABLE public.logs ( -- CREATE VIEW public.block_stats AS - SELECT max(logs.block_number) AS max_block, - min(logs.block_number) AS min_block - FROM public.logs; + SELECT max(full_sync_logs.block_number) AS max_block, + min(full_sync_logs.block_number) AS min_block + FROM public.full_sync_logs; -- @@ -168,6 +168,26 @@ CREATE TABLE public.eth_nodes ( ); +-- +-- Name: full_sync_logs_id_seq; Type: SEQUENCE; Schema: public; Owner: - +-- + +CREATE SEQUENCE public.full_sync_logs_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +-- +-- Name: full_sync_logs_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - +-- + 
+ALTER SEQUENCE public.full_sync_logs_id_seq OWNED BY public.full_sync_logs.id; + + -- -- Name: full_sync_receipts; Type: TABLE; Schema: public; Owner: - -- @@ -429,26 +449,6 @@ CREATE SEQUENCE public.log_filters_id_seq ALTER SEQUENCE public.log_filters_id_seq OWNED BY public.log_filters.id; --- --- Name: logs_id_seq; Type: SEQUENCE; Schema: public; Owner: - --- - -CREATE SEQUENCE public.logs_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: logs_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - --- - -ALTER SEQUENCE public.logs_id_seq OWNED BY public.logs.id; - - -- -- Name: nodes_id_seq; Type: SEQUENCE; Schema: public; Owner: - -- @@ -577,21 +577,21 @@ ALTER SEQUENCE public.watched_contracts_contract_id_seq OWNED BY public.watched_ CREATE VIEW public.watched_event_logs AS SELECT log_filters.name, - logs.id, - logs.block_number, - logs.address, - logs.tx_hash, - logs.index, - logs.topic0, - logs.topic1, - logs.topic2, - logs.topic3, - logs.data, - logs.receipt_id + full_sync_logs.id, + full_sync_logs.block_number, + full_sync_logs.address, + full_sync_logs.tx_hash, + full_sync_logs.index, + full_sync_logs.topic0, + full_sync_logs.topic1, + full_sync_logs.topic2, + full_sync_logs.topic3, + full_sync_logs.data, + full_sync_logs.receipt_id FROM ((public.log_filters CROSS JOIN public.block_stats) - JOIN public.logs ON ((((logs.address)::text = (log_filters.address)::text) AND (logs.block_number >= COALESCE(log_filters.from_block, block_stats.min_block)) AND (logs.block_number <= COALESCE(log_filters.to_block, block_stats.max_block))))) - WHERE ((((log_filters.topic0)::text = (logs.topic0)::text) OR (log_filters.topic0 IS NULL)) AND (((log_filters.topic1)::text = (logs.topic1)::text) OR (log_filters.topic1 IS NULL)) AND (((log_filters.topic2)::text = (logs.topic2)::text) OR (log_filters.topic2 IS NULL)) AND (((log_filters.topic3)::text = (logs.topic3)::text) OR (log_filters.topic3 IS NULL))); + JOIN 
public.full_sync_logs ON ((((full_sync_logs.address)::text = (log_filters.address)::text) AND (full_sync_logs.block_number >= COALESCE(log_filters.from_block, block_stats.min_block)) AND (full_sync_logs.block_number <= COALESCE(log_filters.to_block, block_stats.max_block))))) + WHERE ((((log_filters.topic0)::text = (full_sync_logs.topic0)::text) OR (log_filters.topic0 IS NULL)) AND (((log_filters.topic1)::text = (full_sync_logs.topic1)::text) OR (log_filters.topic1 IS NULL)) AND (((log_filters.topic2)::text = (full_sync_logs.topic2)::text) OR (log_filters.topic2 IS NULL)) AND (((log_filters.topic3)::text = (full_sync_logs.topic3)::text) OR (log_filters.topic3 IS NULL))); -- @@ -622,6 +622,13 @@ ALTER TABLE ONLY public.checked_headers ALTER COLUMN id SET DEFAULT nextval('pub ALTER TABLE ONLY public.eth_nodes ALTER COLUMN id SET DEFAULT nextval('public.nodes_id_seq'::regclass); +-- +-- Name: full_sync_logs id; Type: DEFAULT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.full_sync_logs ALTER COLUMN id SET DEFAULT nextval('public.full_sync_logs_id_seq'::regclass); + + -- -- Name: full_sync_receipts id; Type: DEFAULT; Schema: public; Owner: - -- @@ -671,13 +678,6 @@ ALTER TABLE ONLY public.headers ALTER COLUMN id SET DEFAULT nextval('public.head ALTER TABLE ONLY public.log_filters ALTER COLUMN id SET DEFAULT nextval('public.log_filters_id_seq'::regclass); --- --- Name: logs id; Type: DEFAULT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.logs ALTER COLUMN id SET DEFAULT nextval('public.logs_id_seq'::regclass); - - -- -- Name: queued_storage id; Type: DEFAULT; Schema: public; Owner: - -- @@ -755,6 +755,14 @@ ALTER TABLE ONLY public.eth_nodes ADD CONSTRAINT eth_node_uc UNIQUE (genesis_block, network_id, eth_node_id); +-- +-- Name: full_sync_logs full_sync_logs_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.full_sync_logs + ADD CONSTRAINT full_sync_logs_pkey PRIMARY KEY (id); + + -- -- Name: full_sync_receipts 
full_sync_receipts_pkey; Type: CONSTRAINT; Schema: public; Owner: - -- @@ -819,14 +827,6 @@ ALTER TABLE ONLY public.headers ADD CONSTRAINT headers_pkey PRIMARY KEY (id); --- --- Name: logs logs_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.logs - ADD CONSTRAINT logs_pkey PRIMARY KEY (id); - - -- -- Name: log_filters name_uc; Type: CONSTRAINT; Schema: public; Owner: - -- @@ -1014,10 +1014,10 @@ ALTER TABLE ONLY public.blocks -- --- Name: logs receipts_fk; Type: FK CONSTRAINT; Schema: public; Owner: - +-- Name: full_sync_logs receipts_fk; Type: FK CONSTRAINT; Schema: public; Owner: - -- -ALTER TABLE ONLY public.logs +ALTER TABLE ONLY public.full_sync_logs ADD CONSTRAINT receipts_fk FOREIGN KEY (receipt_id) REFERENCES public.full_sync_receipts(id) ON DELETE CASCADE; diff --git a/pkg/contract_watcher/full/retriever/block_retriever.go b/pkg/contract_watcher/full/retriever/block_retriever.go index 80f330ad..8d720263 100644 --- a/pkg/contract_watcher/full/retriever/block_retriever.go +++ b/pkg/contract_watcher/full/retriever/block_retriever.go @@ -81,7 +81,7 @@ func (r *blockRetriever) retrieveFirstBlockFromLogs(contractAddr string) (int64, var firstBlock int err := r.db.Get( &firstBlock, - "SELECT block_number FROM logs WHERE lower(address) = $1 ORDER BY block_number ASC LIMIT 1", + "SELECT block_number FROM full_sync_logs WHERE lower(address) = $1 ORDER BY block_number ASC LIMIT 1", contractAddr, ) diff --git a/pkg/contract_watcher/shared/helpers/test_helpers/database.go b/pkg/contract_watcher/shared/helpers/test_helpers/database.go index 294a74c6..66020ea1 100644 --- a/pkg/contract_watcher/shared/helpers/test_helpers/database.go +++ b/pkg/contract_watcher/shared/helpers/test_helpers/database.go @@ -149,7 +149,7 @@ func SetupTusdRepo(vulcanizeLogId *int64, wantedEvents, wantedMethods []string) err = receiptRepository.CreateReceiptsAndLogs(blockId, receipts) Expect(err).ToNot(HaveOccurred()) - err = logRepository.Get(vulcanizeLogId, 
`SELECT id FROM logs`) + err = logRepository.Get(vulcanizeLogId, `SELECT id FROM full_sync_logs`) Expect(err).ToNot(HaveOccurred()) info := SetupTusdContract(wantedEvents, wantedMethods) @@ -195,7 +195,7 @@ func SetupENSRepo(vulcanizeLogId *int64, wantedEvents, wantedMethods []string) ( err = receiptRepository.CreateReceiptsAndLogs(blockId, receipts) Expect(err).ToNot(HaveOccurred()) - err = logRepository.Get(vulcanizeLogId, `SELECT id FROM logs`) + err = logRepository.Get(vulcanizeLogId, `SELECT id FROM full_sync_logs`) Expect(err).ToNot(HaveOccurred()) info := SetupENSContract(wantedEvents, wantedMethods) @@ -234,7 +234,7 @@ func TearDown(db *postgres.DB) { _, err = tx.Exec(`DELETE FROM headers`) Expect(err).NotTo(HaveOccurred()) - _, err = tx.Exec(`DELETE FROM logs`) + _, err = tx.Exec(`DELETE FROM full_sync_logs`) Expect(err).NotTo(HaveOccurred()) _, err = tx.Exec(`DELETE FROM log_filters`) diff --git a/pkg/contract_watcher/shared/repository/event_repository.go b/pkg/contract_watcher/shared/repository/event_repository.go index 580997ad..b95cf413 100644 --- a/pkg/contract_watcher/shared/repository/event_repository.go +++ b/pkg/contract_watcher/shared/repository/event_repository.go @@ -231,7 +231,7 @@ func (r *eventRepository) newEventTable(tableID string, event types.Event) error for _, field := range event.Fields { pgStr = pgStr + fmt.Sprintf(" %s_ %s NOT NULL,", strings.ToLower(field.Name), field.PgType) } - pgStr = pgStr + " CONSTRAINT log_index_fk FOREIGN KEY (vulcanize_log_id) REFERENCES logs (id) ON DELETE CASCADE)" + pgStr = pgStr + " CONSTRAINT log_index_fk FOREIGN KEY (vulcanize_log_id) REFERENCES full_sync_logs (id) ON DELETE CASCADE)" case types.HeaderSync: pgStr = pgStr + "(id SERIAL, header_id INTEGER NOT NULL REFERENCES headers (id) ON DELETE CASCADE, token_name CHARACTER VARYING(66) NOT NULL, raw_log JSONB, log_idx INTEGER NOT NULL, tx_idx INTEGER NOT NULL," diff --git a/pkg/datastore/postgres/repositories/block_repository.go 
b/pkg/datastore/postgres/repositories/block_repository.go index 2242efef..705f2589 100644 --- a/pkg/datastore/postgres/repositories/block_repository.go +++ b/pkg/datastore/postgres/repositories/block_repository.go @@ -271,7 +271,7 @@ func (blockRepository BlockRepository) getBlockHash(block core.Block) (string, b func (blockRepository BlockRepository) createLogs(tx *sqlx.Tx, logs []core.Log, receiptId int64) error { for _, tlog := range logs { _, err := tx.Exec( - `INSERT INTO logs (block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data, receipt_id) + `INSERT INTO full_sync_logs (block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data, receipt_id) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) `, tlog.BlockNumber, tlog.Address, tlog.TxHash, tlog.Index, tlog.Topics[0], tlog.Topics[1], tlog.Topics[2], tlog.Topics[3], tlog.Data, receiptId, diff --git a/pkg/datastore/postgres/repositories/full_sync_receipt_repository.go b/pkg/datastore/postgres/repositories/full_sync_receipt_repository.go index 71a345f6..50eb86be 100644 --- a/pkg/datastore/postgres/repositories/full_sync_receipt_repository.go +++ b/pkg/datastore/postgres/repositories/full_sync_receipt_repository.go @@ -56,7 +56,7 @@ func (receiptRepository FullSyncReceiptRepository) CreateReceiptsAndLogs(blockId func createLogs(logs []core.Log, receiptId int64, tx *sqlx.Tx) error { for _, log := range logs { _, err := tx.Exec( - `INSERT INTO logs (block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data, receipt_id) + `INSERT INTO full_sync_logs (block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data, receipt_id) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) `, log.BlockNumber, log.Address, log.TxHash, log.Index, log.Topics[0], log.Topics[1], log.Topics[2], log.Topics[3], log.Data, receiptId, diff --git a/pkg/datastore/postgres/repositories/logs_repository.go b/pkg/datastore/postgres/repositories/logs_repository.go index 
4c13e723..34cdddf4 100644 --- a/pkg/datastore/postgres/repositories/logs_repository.go +++ b/pkg/datastore/postgres/repositories/logs_repository.go @@ -33,7 +33,7 @@ func (logRepository LogRepository) CreateLogs(lgs []core.Log, receiptId int64) e tx, _ := logRepository.DB.Beginx() for _, tlog := range lgs { _, insertLogErr := tx.Exec( - `INSERT INTO logs (block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data, receipt_id) + `INSERT INTO full_sync_logs (block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data, receipt_id) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) `, tlog.BlockNumber, tlog.Address, tlog.TxHash, tlog.Index, tlog.Topics[0], tlog.Topics[1], tlog.Topics[2], tlog.Topics[3], tlog.Data, receiptId, @@ -68,7 +68,7 @@ func (logRepository LogRepository) GetLogs(address string, blockNumber int64) ([ topic2, topic3, data - FROM logs + FROM full_sync_logs WHERE address = $1 AND block_number = $2 ORDER BY block_number DESC`, address, blockNumber) if err != nil { diff --git a/test_config/test_config.go b/test_config/test_config.go index 33f6b5bc..b8f62b72 100644 --- a/test_config/test_config.go +++ b/test_config/test_config.go @@ -109,15 +109,15 @@ func CleanTestDB(db *postgres.DB) { db.MustExec("DELETE FROM blocks") db.MustExec("DELETE FROM checked_headers") // can't delete from eth_nodes since this function is called after the required eth_node is persisted + db.MustExec("DELETE FROM full_sync_logs") + db.MustExec("DELETE FROM full_sync_receipts") db.MustExec("DELETE FROM full_sync_transactions") db.MustExec("DELETE FROM goose_db_version") - db.MustExec("DELETE FROM headers") - db.MustExec("DELETE FROM header_sync_transactions") - db.MustExec("DELETE FROM log_filters") - db.MustExec("DELETE FROM logs") - db.MustExec("DELETE FROM queued_storage") - db.MustExec("DELETE FROM full_sync_receipts") db.MustExec("DELETE FROM header_sync_receipts") + db.MustExec("DELETE FROM header_sync_transactions") + db.MustExec("DELETE 
FROM headers") + db.MustExec("DELETE FROM log_filters") + db.MustExec("DELETE FROM queued_storage") db.MustExec("DELETE FROM watched_contracts") } From 66a4e20b2052ac840bcf45d23416a9fe96b97030 Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Fri, 19 Jul 2019 11:58:28 -0500 Subject: [PATCH 02/21] Update event transformer to take header ID - rather than header - enables executing transformers without full header lookup --- .../shared/factories/event/transformer.go | 17 ++++++++-------- .../factories/event/transformer_test.go | 20 +++++++++---------- libraries/shared/mocks/transformer.go | 7 +++---- .../shared/transformer/event_transformer.go | 3 +-- libraries/shared/watcher/event_watcher.go | 6 +++--- 5 files changed, 25 insertions(+), 28 deletions(-) diff --git a/libraries/shared/factories/event/transformer.go b/libraries/shared/factories/event/transformer.go index 72a1ae90..dd2c675a 100644 --- a/libraries/shared/factories/event/transformer.go +++ b/libraries/shared/factories/event/transformer.go @@ -18,10 +18,9 @@ package event import ( "github.com/ethereum/go-ethereum/core/types" - log "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus" "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" ) @@ -36,14 +35,14 @@ func (transformer Transformer) NewTransformer(db *postgres.DB) transformer.Event return transformer } -func (transformer Transformer) Execute(logs []types.Log, header core.Header) error { +func (transformer Transformer) Execute(logs []types.Log, headerID int64) error { transformerName := transformer.Config.TransformerName config := transformer.Config if len(logs) < 1 { - err := transformer.Repository.MarkHeaderChecked(header.Id) + err := transformer.Repository.MarkHeaderChecked(headerID) if err != nil { - log.Printf("Error marking header as checked in %v: %v", transformerName, err) + logrus.Printf("Error marking header as checked in 
%v: %v", transformerName, err) return err } return nil @@ -51,19 +50,19 @@ func (transformer Transformer) Execute(logs []types.Log, header core.Header) err entities, err := transformer.Converter.ToEntities(config.ContractAbi, logs) if err != nil { - log.Printf("Error converting logs to entities in %v: %v", transformerName, err) + logrus.Printf("Error converting logs to entities in %v: %v", transformerName, err) return err } models, err := transformer.Converter.ToModels(entities) if err != nil { - log.Printf("Error converting entities to models in %v: %v", transformerName, err) + logrus.Printf("Error converting entities to models in %v: %v", transformerName, err) return err } - err = transformer.Repository.Create(header.Id, models) + err = transformer.Repository.Create(headerID, models) if err != nil { - log.Printf("Error persisting %v record: %v", transformerName, err) + logrus.Printf("Error persisting %v record: %v", transformerName, err) return err } diff --git a/libraries/shared/factories/event/transformer_test.go b/libraries/shared/factories/event/transformer_test.go index 086b6707..8a5fadab 100644 --- a/libraries/shared/factories/event/transformer_test.go +++ b/libraries/shared/factories/event/transformer_test.go @@ -59,14 +59,14 @@ var _ = Describe("Transformer", func() { }) It("marks header checked if no logs returned", func() { - err := t.Execute([]types.Log{}, headerOne) + err := t.Execute([]types.Log{}, headerOne.Id) Expect(err).NotTo(HaveOccurred()) repository.AssertMarkHeaderCheckedCalledWith(headerOne.Id) }) It("doesn't attempt to convert or persist an empty collection when there are no logs", func() { - err := t.Execute([]types.Log{}, headerOne) + err := t.Execute([]types.Log{}, headerOne.Id) Expect(err).NotTo(HaveOccurred()) Expect(converter.ToEntitiesCalledCounter).To(Equal(0)) @@ -75,7 +75,7 @@ var _ = Describe("Transformer", func() { }) It("does not call repository.MarkCheckedHeader when there are logs", func() { - err := t.Execute(logs, 
headerOne) + err := t.Execute(logs, headerOne.Id) Expect(err).NotTo(HaveOccurred()) repository.AssertMarkHeaderCheckedNotCalled() @@ -84,14 +84,14 @@ var _ = Describe("Transformer", func() { It("returns error if marking header checked returns err", func() { repository.SetMarkHeaderCheckedError(fakes.FakeError) - err := t.Execute([]types.Log{}, headerOne) + err := t.Execute([]types.Log{}, headerOne.Id) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) }) It("converts an eth log to an entity", func() { - err := t.Execute(logs, headerOne) + err := t.Execute(logs, headerOne.Id) Expect(err).NotTo(HaveOccurred()) Expect(converter.ContractAbi).To(Equal(config.ContractAbi)) @@ -101,7 +101,7 @@ var _ = Describe("Transformer", func() { It("returns an error if converter fails", func() { converter.ToEntitiesError = fakes.FakeError - err := t.Execute(logs, headerOne) + err := t.Execute(logs, headerOne.Id) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) @@ -110,7 +110,7 @@ var _ = Describe("Transformer", func() { It("converts an entity to a model", func() { converter.EntitiesToReturn = []interface{}{test_data.GenericEntity{}} - err := t.Execute(logs, headerOne) + err := t.Execute(logs, headerOne.Id) Expect(err).NotTo(HaveOccurred()) Expect(converter.EntitiesToConvert[0]).To(Equal(test_data.GenericEntity{})) @@ -120,7 +120,7 @@ var _ = Describe("Transformer", func() { converter.EntitiesToReturn = []interface{}{test_data.GenericEntity{}} converter.ToModelsError = fakes.FakeError - err := t.Execute(logs, headerOne) + err := t.Execute(logs, headerOne.Id) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) @@ -129,7 +129,7 @@ var _ = Describe("Transformer", func() { It("persists the record", func() { converter.ModelsToReturn = []interface{}{test_data.GenericModel{}} - err := t.Execute(logs, headerOne) + err := t.Execute(logs, headerOne.Id) Expect(err).NotTo(HaveOccurred()) 
Expect(repository.PassedHeaderID).To(Equal(headerOne.Id)) @@ -138,7 +138,7 @@ var _ = Describe("Transformer", func() { It("returns error if persisting the record fails", func() { repository.SetCreateError(fakes.FakeError) - err := t.Execute(logs, headerOne) + err := t.Execute(logs, headerOne.Id) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) diff --git a/libraries/shared/mocks/transformer.go b/libraries/shared/mocks/transformer.go index f7b01de3..e8bf65e4 100644 --- a/libraries/shared/mocks/transformer.go +++ b/libraries/shared/mocks/transformer.go @@ -20,7 +20,6 @@ import ( "github.com/ethereum/go-ethereum/core/types" shared_t "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" - "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" ) @@ -28,17 +27,17 @@ type MockTransformer struct { ExecuteWasCalled bool ExecuteError error PassedLogs []types.Log - PassedHeader core.Header + PassedHeaderID int64 config shared_t.EventTransformerConfig } -func (mh *MockTransformer) Execute(logs []types.Log, header core.Header) error { +func (mh *MockTransformer) Execute(logs []types.Log, headerID int64) error { if mh.ExecuteError != nil { return mh.ExecuteError } mh.ExecuteWasCalled = true mh.PassedLogs = logs - mh.PassedHeader = header + mh.PassedHeaderID = headerID return nil } diff --git a/libraries/shared/transformer/event_transformer.go b/libraries/shared/transformer/event_transformer.go index 7bf517ef..1bbb8b54 100644 --- a/libraries/shared/transformer/event_transformer.go +++ b/libraries/shared/transformer/event_transformer.go @@ -20,12 +20,11 @@ import ( "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core/types" - "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" ) type EventTransformer interface { - Execute(logs []types.Log, header core.Header) error + Execute(logs []types.Log, headerID int64) error 
GetConfig() EventTransformerConfig } diff --git a/libraries/shared/watcher/event_watcher.go b/libraries/shared/watcher/event_watcher.go index 0bcc82f8..6c78484b 100644 --- a/libraries/shared/watcher/event_watcher.go +++ b/libraries/shared/watcher/event_watcher.go @@ -122,7 +122,7 @@ func (watcher *EventWatcher) Execute(recheckHeaders constants.TransformerExecuti return transactionsSyncErr } - transformErr := watcher.transformLogs(logs, header) + transformErr := watcher.transformLogs(logs, header.Id) if transformErr != nil { logrus.Error("Could not transform logs: ", transformErr) return transformErr @@ -131,7 +131,7 @@ func (watcher *EventWatcher) Execute(recheckHeaders constants.TransformerExecuti return err } -func (watcher *EventWatcher) transformLogs(logs []types.Log, header core.Header) error { +func (watcher *EventWatcher) transformLogs(logs []types.Log, headerID int64) error { chunkedLogs := watcher.Chunker.ChunkLogs(logs) // Can't quit early and mark as checked if there are no logs. 
If we are running continuousLogSync, @@ -139,7 +139,7 @@ func (watcher *EventWatcher) transformLogs(logs []types.Log, header core.Header) for _, t := range watcher.Transformers { transformerName := t.GetConfig().TransformerName logChunk := chunkedLogs[transformerName] - err := t.Execute(logChunk, header) + err := t.Execute(logChunk, headerID) if err != nil { logrus.Errorf("%v transformer failed to execute in watcher: %v", transformerName, err) return err From cb819fa9a60670f84be23ac1b60e96dcd3cb951c Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Tue, 23 Jul 2019 14:57:52 -0500 Subject: [PATCH 03/21] Write event logs to database before transforming - enables decoupling event extraction/persistence from transformation - modifies event transformer, converter, and log chunker to accept payload that includes internal log database ID with log data - remove alias for transformer pkg as shared_t - remove unused mock watcher repository --- .../00029_create_header_sync_logs_table.sql | 21 +++ db/schema.sql | 70 ++++++++ libraries/shared/chunker/log_chunker.go | 23 ++- libraries/shared/chunker/log_chunker_test.go | 83 +++++----- libraries/shared/factories/event/converter.go | 4 +- .../shared/factories/event/transformer.go | 4 +- .../factories/event/transformer_test.go | 16 +- libraries/shared/mocks/converter.go | 10 +- libraries/shared/mocks/transformer.go | 19 ++- libraries/shared/mocks/watcher_repository.go | 69 -------- libraries/shared/repository/repository.go | 64 ++++++++ .../shared/repository/repository_test.go | 152 ++++++++++++++++++ libraries/shared/test_data/generic.go | 35 ++-- .../shared/transformer/event_transformer.go | 4 +- libraries/shared/watcher/event_watcher.go | 10 +- .../shared/watcher/event_watcher_test.go | 44 +++-- pkg/core/log.go | 9 ++ test_config/test_config.go | 1 + 18 files changed, 468 insertions(+), 170 deletions(-) create mode 100644 db/migrations/00029_create_header_sync_logs_table.sql delete mode 100644 
libraries/shared/mocks/watcher_repository.go diff --git a/db/migrations/00029_create_header_sync_logs_table.sql b/db/migrations/00029_create_header_sync_logs_table.sql new file mode 100644 index 00000000..1d4cd4dd --- /dev/null +++ b/db/migrations/00029_create_header_sync_logs_table.sql @@ -0,0 +1,21 @@ +-- +goose Up +-- SQL in this section is executed when the migration is applied. +CREATE TABLE header_sync_logs +( + id SERIAL PRIMARY KEY, + header_id INTEGER NOT NULL REFERENCES headers (id) ON DELETE CASCADE, + address VARCHAR(66), + topics BYTEA[], + data BYTEA, + block_number BIGINT, + block_hash VARCHAR(66), + tx_hash VARCHAR(66), + tx_index INTEGER, + log_index INTEGER, + raw JSONB, + UNIQUE (header_id, tx_index, log_index) +); + +-- +goose Down +-- SQL in this section is executed when the migration is rolled back. +DROP TABLE header_sync_logs; \ No newline at end of file diff --git a/db/schema.sql b/db/schema.sql index d58aa9a2..a8f04eb3 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -296,6 +296,45 @@ CREATE SEQUENCE public.goose_db_version_id_seq ALTER SEQUENCE public.goose_db_version_id_seq OWNED BY public.goose_db_version.id; +-- +-- Name: header_sync_logs; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.header_sync_logs ( + id integer NOT NULL, + header_id integer NOT NULL, + address character varying(66), + topics bytea[], + data bytea, + block_number bigint, + block_hash character varying(66), + tx_hash character varying(66), + tx_index integer, + log_index integer, + raw jsonb +); + + +-- +-- Name: header_sync_logs_id_seq; Type: SEQUENCE; Schema: public; Owner: - +-- + +CREATE SEQUENCE public.header_sync_logs_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +-- +-- Name: header_sync_logs_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - +-- + +ALTER SEQUENCE public.header_sync_logs_id_seq OWNED BY public.header_sync_logs.id; + + -- -- Name: header_sync_receipts; Type: TABLE; 
Schema: public; Owner: - -- @@ -650,6 +689,13 @@ ALTER TABLE ONLY public.full_sync_transactions ALTER COLUMN id SET DEFAULT nextv ALTER TABLE ONLY public.goose_db_version ALTER COLUMN id SET DEFAULT nextval('public.goose_db_version_id_seq'::regclass); +-- +-- Name: header_sync_logs id; Type: DEFAULT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.header_sync_logs ALTER COLUMN id SET DEFAULT nextval('public.header_sync_logs_id_seq'::regclass); + + -- -- Name: header_sync_receipts id; Type: DEFAULT; Schema: public; Owner: - -- @@ -787,6 +833,22 @@ ALTER TABLE ONLY public.goose_db_version ADD CONSTRAINT goose_db_version_pkey PRIMARY KEY (id); +-- +-- Name: header_sync_logs header_sync_logs_header_id_tx_index_log_index_key; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.header_sync_logs + ADD CONSTRAINT header_sync_logs_header_id_tx_index_log_index_key UNIQUE (header_id, tx_index, log_index); + + +-- +-- Name: header_sync_logs header_sync_logs_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.header_sync_logs + ADD CONSTRAINT header_sync_logs_pkey PRIMARY KEY (id); + + -- -- Name: header_sync_receipts header_sync_receipts_header_id_transaction_id_key; Type: CONSTRAINT; Schema: public; Owner: - -- @@ -965,6 +1027,14 @@ ALTER TABLE ONLY public.full_sync_transactions ADD CONSTRAINT full_sync_transactions_block_id_fkey FOREIGN KEY (block_id) REFERENCES public.blocks(id) ON DELETE CASCADE; +-- +-- Name: header_sync_logs header_sync_logs_header_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.header_sync_logs + ADD CONSTRAINT header_sync_logs_header_id_fkey FOREIGN KEY (header_id) REFERENCES public.headers(id) ON DELETE CASCADE; + + -- -- Name: header_sync_receipts header_sync_receipts_contract_address_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - -- diff --git a/libraries/shared/chunker/log_chunker.go b/libraries/shared/chunker/log_chunker.go index 
87861ba4..c3a780ba 100644 --- a/libraries/shared/chunker/log_chunker.go +++ b/libraries/shared/chunker/log_chunker.go @@ -20,14 +20,13 @@ import ( "strings" "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - - shared_t "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" + "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" + "github.com/vulcanize/vulcanizedb/pkg/core" ) type Chunker interface { - AddConfigs(transformerConfigs []shared_t.EventTransformerConfig) - ChunkLogs(logs []types.Log) map[string][]types.Log + AddConfigs(transformerConfigs []transformer.EventTransformerConfig) + ChunkLogs(logs []core.HeaderSyncLog) map[string][]core.HeaderSyncLog } type LogChunker struct { @@ -45,7 +44,7 @@ func NewLogChunker() *LogChunker { } // Configures the chunker by adding more addreses and topics to consider. -func (chunker *LogChunker) AddConfigs(transformerConfigs []shared_t.EventTransformerConfig) { +func (chunker *LogChunker) AddConfigs(transformerConfigs []transformer.EventTransformerConfig) { for _, config := range transformerConfigs { for _, address := range config.ContractAddresses { var lowerCaseAddress = strings.ToLower(address) @@ -56,15 +55,15 @@ func (chunker *LogChunker) AddConfigs(transformerConfigs []shared_t.EventTransfo } // Goes through an array of logs, associating relevant logs (matching addresses and topic) with transformers -func (chunker *LogChunker) ChunkLogs(logs []types.Log) map[string][]types.Log { - chunks := map[string][]types.Log{} +func (chunker *LogChunker) ChunkLogs(logs []core.HeaderSyncLog) map[string][]core.HeaderSyncLog { + chunks := map[string][]core.HeaderSyncLog{} for _, log := range logs { // Topic0 is not unique to each transformer, also need to consider the contract address - relevantTransformers := chunker.AddressToNames[strings.ToLower(log.Address.String())] + relevantTransformers := chunker.AddressToNames[strings.ToLower(log.Log.Address.Hex())] - for _, 
transformer := range relevantTransformers { - if chunker.NameToTopic0[transformer] == log.Topics[0] { - chunks[transformer] = append(chunks[transformer], log) + for _, t := range relevantTransformers { + if chunker.NameToTopic0[t] == log.Log.Topics[0] { + chunks[t] = append(chunks[t], log) } } } diff --git a/libraries/shared/chunker/log_chunker_test.go b/libraries/shared/chunker/log_chunker_test.go index daf23a50..629c04b3 100644 --- a/libraries/shared/chunker/log_chunker_test.go +++ b/libraries/shared/chunker/log_chunker_test.go @@ -23,34 +23,35 @@ import ( . "github.com/onsi/gomega" chunk "github.com/vulcanize/vulcanizedb/libraries/shared/chunker" - shared_t "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" + "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" + "github.com/vulcanize/vulcanizedb/pkg/core" ) var _ = Describe("Log chunker", func() { var ( - configs []shared_t.EventTransformerConfig + configs []transformer.EventTransformerConfig chunker *chunk.LogChunker ) BeforeEach(func() { - configA := shared_t.EventTransformerConfig{ + configA := transformer.EventTransformerConfig{ TransformerName: "TransformerA", ContractAddresses: []string{"0x00000000000000000000000000000000000000A1", "0x00000000000000000000000000000000000000A2"}, Topic: "0xA", } - configB := shared_t.EventTransformerConfig{ + configB := transformer.EventTransformerConfig{ TransformerName: "TransformerB", ContractAddresses: []string{"0x00000000000000000000000000000000000000B1"}, Topic: "0xB", } - configC := shared_t.EventTransformerConfig{ + configC := transformer.EventTransformerConfig{ TransformerName: "TransformerC", ContractAddresses: []string{"0x00000000000000000000000000000000000000A2"}, Topic: "0xC", } - configs = []shared_t.EventTransformerConfig{configA, configB, configC} + configs = []transformer.EventTransformerConfig{configA, configB, configC} chunker = chunk.NewLogChunker() chunker.AddConfigs(configs) }) @@ -73,24 +74,24 @@ var _ = Describe("Log chunker", 
func() { Describe("AddConfigs", func() { It("can add more configs later", func() { - configD := shared_t.EventTransformerConfig{ + configD := transformer.EventTransformerConfig{ TransformerName: "TransformerD", ContractAddresses: []string{"0x000000000000000000000000000000000000000D"}, Topic: "0xD", } - chunker.AddConfigs([]shared_t.EventTransformerConfig{configD}) + chunker.AddConfigs([]transformer.EventTransformerConfig{configD}) Expect(chunker.AddressToNames).To(ContainElement([]string{"TransformerD"})) Expect(chunker.NameToTopic0).To(ContainElement(common.HexToHash("0xD"))) }) It("lower cases address", func() { - configD := shared_t.EventTransformerConfig{ + configD := transformer.EventTransformerConfig{ TransformerName: "TransformerD", ContractAddresses: []string{"0x000000000000000000000000000000000000000D"}, Topic: "0xD", } - chunker.AddConfigs([]shared_t.EventTransformerConfig{configD}) + chunker.AddConfigs([]transformer.EventTransformerConfig{configD}) Expect(chunker.AddressToNames["0x000000000000000000000000000000000000000d"]).To(Equal([]string{"TransformerD"})) }) @@ -98,7 +99,7 @@ var _ = Describe("Log chunker", func() { Describe("ChunkLogs", func() { It("only associates logs with relevant topic0 and address to transformers", func() { - logs := []types.Log{log1, log2, log3, log4, log5} + logs := []core.HeaderSyncLog{log1, log2, log3, log4, log5} chunks := chunker.ChunkLogs(logs) Expect(chunks["TransformerA"]).To(And(ContainElement(log1), ContainElement(log4))) @@ -110,43 +111,53 @@ var _ = Describe("Log chunker", func() { var ( // Match TransformerA - log1 = types.Log{ - Address: common.HexToAddress("0xA1"), - Topics: []common.Hash{ - common.HexToHash("0xA"), - common.HexToHash("0xLogTopic1"), + log1 = core.HeaderSyncLog{ + Log: types.Log{ + Address: common.HexToAddress("0xA1"), + Topics: []common.Hash{ + common.HexToHash("0xA"), + common.HexToHash("0xLogTopic1"), + }, }, } // Match TransformerA address, but not topic0 - log2 = types.Log{ - Address: 
common.HexToAddress("0xA1"), - Topics: []common.Hash{ - common.HexToHash("0xB"), - common.HexToHash("0xLogTopic2"), + log2 = core.HeaderSyncLog{ + Log: types.Log{ + Address: common.HexToAddress("0xA1"), + Topics: []common.Hash{ + common.HexToHash("0xB"), + common.HexToHash("0xLogTopic2"), + }, }, } // Match TransformerA topic, but TransformerB address - log3 = types.Log{ - Address: common.HexToAddress("0xB1"), - Topics: []common.Hash{ - common.HexToHash("0xA"), - common.HexToHash("0xLogTopic3"), + log3 = core.HeaderSyncLog{ + Log: types.Log{ + Address: common.HexToAddress("0xB1"), + Topics: []common.Hash{ + common.HexToHash("0xA"), + common.HexToHash("0xLogTopic3"), + }, }, } // Match TransformerA, with the other address - log4 = types.Log{ - Address: common.HexToAddress("0xA2"), - Topics: []common.Hash{ - common.HexToHash("0xA"), - common.HexToHash("0xLogTopic4"), + log4 = core.HeaderSyncLog{ + Log: types.Log{ + Address: common.HexToAddress("0xA2"), + Topics: []common.Hash{ + common.HexToHash("0xA"), + common.HexToHash("0xLogTopic4"), + }, }, } // Match TransformerC, which shares address with TransformerA - log5 = types.Log{ - Address: common.HexToAddress("0xA2"), - Topics: []common.Hash{ - common.HexToHash("0xC"), - common.HexToHash("0xLogTopic5"), + log5 = core.HeaderSyncLog{ + Log: types.Log{ + Address: common.HexToAddress("0xA2"), + Topics: []common.Hash{ + common.HexToHash("0xC"), + common.HexToHash("0xLogTopic5"), + }, }, } ) diff --git a/libraries/shared/factories/event/converter.go b/libraries/shared/factories/event/converter.go index 4fb647f2..525b628a 100644 --- a/libraries/shared/factories/event/converter.go +++ b/libraries/shared/factories/event/converter.go @@ -16,9 +16,9 @@ package event -import "github.com/ethereum/go-ethereum/core/types" +import "github.com/vulcanize/vulcanizedb/pkg/core" type Converter interface { - ToEntities(contractAbi string, ethLog []types.Log) ([]interface{}, error) + ToEntities(contractAbi string, ethLog 
[]core.HeaderSyncLog) ([]interface{}, error) ToModels([]interface{}) ([]interface{}, error) } diff --git a/libraries/shared/factories/event/transformer.go b/libraries/shared/factories/event/transformer.go index dd2c675a..19c80eec 100644 --- a/libraries/shared/factories/event/transformer.go +++ b/libraries/shared/factories/event/transformer.go @@ -17,10 +17,10 @@ package event import ( - "github.com/ethereum/go-ethereum/core/types" "github.com/sirupsen/logrus" "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" + "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" ) @@ -35,7 +35,7 @@ func (transformer Transformer) NewTransformer(db *postgres.DB) transformer.Event return transformer } -func (transformer Transformer) Execute(logs []types.Log, headerID int64) error { +func (transformer Transformer) Execute(logs []core.HeaderSyncLog, headerID int64) error { transformerName := transformer.Config.TransformerName config := transformer.Config diff --git a/libraries/shared/factories/event/transformer_test.go b/libraries/shared/factories/event/transformer_test.go index 8a5fadab..c13b0e0b 100644 --- a/libraries/shared/factories/event/transformer_test.go +++ b/libraries/shared/factories/event/transformer_test.go @@ -19,7 +19,6 @@ package event_test import ( "math/rand" - "github.com/ethereum/go-ethereum/core/types" . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" @@ -38,7 +37,7 @@ var _ = Describe("Transformer", func() { t transformer.EventTransformer headerOne core.Header config = test_data.GenericTestConfig - logs = test_data.GenericTestLogs + logs []core.HeaderSyncLog ) BeforeEach(func() { @@ -52,6 +51,13 @@ var _ = Describe("Transformer", func() { }.NewTransformer(nil) headerOne = core.Header{Id: rand.Int63(), BlockNumber: rand.Int63()} + + logs = []core.HeaderSyncLog{{ + ID: 0, + HeaderID: headerOne.Id, + Log: test_data.GenericTestLog(), + Transformed: false, + }} }) It("sets the db", func() { @@ -59,14 +65,14 @@ var _ = Describe("Transformer", func() { }) It("marks header checked if no logs returned", func() { - err := t.Execute([]types.Log{}, headerOne.Id) + err := t.Execute([]core.HeaderSyncLog{}, headerOne.Id) Expect(err).NotTo(HaveOccurred()) repository.AssertMarkHeaderCheckedCalledWith(headerOne.Id) }) It("doesn't attempt to convert or persist an empty collection when there are no logs", func() { - err := t.Execute([]types.Log{}, headerOne.Id) + err := t.Execute([]core.HeaderSyncLog{}, headerOne.Id) Expect(err).NotTo(HaveOccurred()) Expect(converter.ToEntitiesCalledCounter).To(Equal(0)) @@ -84,7 +90,7 @@ var _ = Describe("Transformer", func() { It("returns error if marking header checked returns err", func() { repository.SetMarkHeaderCheckedError(fakes.FakeError) - err := t.Execute([]types.Log{}, headerOne.Id) + err := t.Execute([]core.HeaderSyncLog{}, headerOne.Id) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) diff --git a/libraries/shared/mocks/converter.go b/libraries/shared/mocks/converter.go index 0a687718..c0f42c4e 100644 --- a/libraries/shared/mocks/converter.go +++ b/libraries/shared/mocks/converter.go @@ -16,9 +16,7 @@ package mocks -import ( - "github.com/ethereum/go-ethereum/core/types" -) +import "github.com/vulcanize/vulcanizedb/pkg/core" type MockConverter struct { ToEntitiesError error @@ -27,7 +25,7 @@ type MockConverter struct { 
entityConverterError error modelConverterError error ContractAbi string - LogsToConvert []types.Log + LogsToConvert []core.HeaderSyncLog EntitiesToConvert []interface{} EntitiesToReturn []interface{} ModelsToReturn []interface{} @@ -35,9 +33,9 @@ type MockConverter struct { ToModelsCalledCounter int } -func (converter *MockConverter) ToEntities(contractAbi string, ethLogs []types.Log) ([]interface{}, error) { +func (converter *MockConverter) ToEntities(contractAbi string, ethLogs []core.HeaderSyncLog) ([]interface{}, error) { for _, log := range ethLogs { - converter.PassedContractAddresses = append(converter.PassedContractAddresses, log.Address.Hex()) + converter.PassedContractAddresses = append(converter.PassedContractAddresses, log.Log.Address.Hex()) } converter.ContractAbi = contractAbi converter.LogsToConvert = ethLogs diff --git a/libraries/shared/mocks/transformer.go b/libraries/shared/mocks/transformer.go index e8bf65e4..034bbe69 100644 --- a/libraries/shared/mocks/transformer.go +++ b/libraries/shared/mocks/transformer.go @@ -17,21 +17,20 @@ package mocks import ( - "github.com/ethereum/go-ethereum/core/types" - - shared_t "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" + "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" + "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" ) type MockTransformer struct { ExecuteWasCalled bool ExecuteError error - PassedLogs []types.Log + PassedLogs []core.HeaderSyncLog PassedHeaderID int64 - config shared_t.EventTransformerConfig + config transformer.EventTransformerConfig } -func (mh *MockTransformer) Execute(logs []types.Log, headerID int64) error { +func (mh *MockTransformer) Execute(logs []core.HeaderSyncLog, headerID int64) error { if mh.ExecuteError != nil { return mh.ExecuteError } @@ -41,19 +40,19 @@ func (mh *MockTransformer) Execute(logs []types.Log, headerID int64) error { return nil } -func (mh *MockTransformer) GetConfig() 
shared_t.EventTransformerConfig { +func (mh *MockTransformer) GetConfig() transformer.EventTransformerConfig { return mh.config } -func (mh *MockTransformer) SetTransformerConfig(config shared_t.EventTransformerConfig) { +func (mh *MockTransformer) SetTransformerConfig(config transformer.EventTransformerConfig) { mh.config = config } -func (mh *MockTransformer) FakeTransformerInitializer(db *postgres.DB) shared_t.EventTransformer { +func (mh *MockTransformer) FakeTransformerInitializer(db *postgres.DB) transformer.EventTransformer { return mh } -var FakeTransformerConfig = shared_t.EventTransformerConfig{ +var FakeTransformerConfig = transformer.EventTransformerConfig{ TransformerName: "FakeTransformer", ContractAddresses: []string{"FakeAddress"}, Topic: "FakeTopic", diff --git a/libraries/shared/mocks/watcher_repository.go b/libraries/shared/mocks/watcher_repository.go deleted file mode 100644 index fb0da575..00000000 --- a/libraries/shared/mocks/watcher_repository.go +++ /dev/null @@ -1,69 +0,0 @@ -// VulcanizeDB -// Copyright © 2019 Vulcanize - -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. - -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see . 
- -package mocks - -import ( - "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" -) - -type MockWatcherRepository struct { - ReturnCheckedColumnNames []string - GetCheckedColumnNamesError error - GetCheckedColumnNamesCalled bool - - ReturnNotCheckedSQL string - CreateNotCheckedSQLCalled bool - - ReturnMissingHeaders []core.Header - MissingHeadersError error - MissingHeadersCalled bool -} - -func (repository *MockWatcherRepository) GetCheckedColumnNames(db *postgres.DB) ([]string, error) { - repository.GetCheckedColumnNamesCalled = true - if repository.GetCheckedColumnNamesError != nil { - return []string{}, repository.GetCheckedColumnNamesError - } - - return repository.ReturnCheckedColumnNames, nil -} - -func (repository *MockWatcherRepository) SetCheckedColumnNames(checkedColumnNames []string) { - repository.ReturnCheckedColumnNames = checkedColumnNames -} - -func (repository *MockWatcherRepository) CreateNotCheckedSQL(boolColumns []string) string { - repository.CreateNotCheckedSQLCalled = true - return repository.ReturnNotCheckedSQL -} - -func (repository *MockWatcherRepository) SetNotCheckedSQL(notCheckedSql string) { - repository.ReturnNotCheckedSQL = notCheckedSql -} - -func (repository *MockWatcherRepository) MissingHeaders(startingBlockNumber int64, endingBlockNumber int64, db *postgres.DB, notCheckedSQL string) ([]core.Header, error) { - if repository.MissingHeadersError != nil { - return []core.Header{}, repository.MissingHeadersError - } - repository.MissingHeadersCalled = true - return repository.ReturnMissingHeaders, nil -} - -func (repository *MockWatcherRepository) SetMissingHeaders(headers []core.Header) { - repository.ReturnMissingHeaders = headers -} diff --git a/libraries/shared/repository/repository.go b/libraries/shared/repository/repository.go index 2b7e2ce1..8aea8c49 100644 --- a/libraries/shared/repository/repository.go +++ b/libraries/shared/repository/repository.go @@ -18,15 +18,23 @@ 
package repository import ( "bytes" + "database/sql" "database/sql/driver" "fmt" + "github.com/ethereum/go-ethereum/core/types" "github.com/jmoiron/sqlx" + "github.com/lib/pq" + "github.com/sirupsen/logrus" "github.com/vulcanize/vulcanizedb/libraries/shared/constants" "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" ) +const insertHeaderSyncLogQuery = `INSERT INTO header_sync_logs + (header_id, address, topics, data, block_number, block_hash, tx_index, tx_hash, log_index, raw) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) ON CONFLICT DO NOTHING RETURNING id` + func MarkHeaderChecked(headerID int64, db *postgres.DB, checkedHeadersColumn string) error { _, err := db.Exec(`INSERT INTO public.checked_headers (header_id, `+checkedHeadersColumn+`) VALUES ($1, $2) @@ -113,6 +121,62 @@ func CreateHeaderCheckedPredicateSQL(boolColumns []string, recheckHeaders consta } } +func CreateLogs(headerID int64, logs []types.Log, db *postgres.DB) ([]core.HeaderSyncLog, error) { + tx, txErr := db.Beginx() + if txErr != nil { + return nil, txErr + } + var results []core.HeaderSyncLog + for _, log := range logs { + logID, err := insertLog(headerID, log, tx) + if err != nil { + if logWasADuplicate(err) { + continue + } + rollbackErr := tx.Rollback() + if rollbackErr != nil { + logrus.Errorf("failed to rollback header sync log insert: %s", rollbackErr.Error()) + } + return nil, err + } + results = append(results, buildLog(logID, headerID, log)) + } + return results, tx.Commit() +} + +func logWasADuplicate(err error) bool { + return err == sql.ErrNoRows +} + +func insertLog(headerID int64, log types.Log, tx *sqlx.Tx) (int64, error) { + topics := buildTopics(log) + raw, jsonErr := log.MarshalJSON() + if jsonErr != nil { + return 0, jsonErr + } + var logID int64 + err := tx.QueryRowx(insertHeaderSyncLogQuery, headerID, log.Address.Hex(), topics, log.Data, log.BlockNumber, + log.BlockHash.Hex(), log.TxIndex, log.TxHash.Hex(), 
log.Index, raw).Scan(&logID) + return logID, err +} + +func buildLog(logID int64, headerID int64, log types.Log) core.HeaderSyncLog { + return core.HeaderSyncLog{ + ID: logID, + HeaderID: headerID, + Log: log, + Transformed: false, + } +} + +func buildTopics(log types.Log) pq.ByteaArray { + var topics pq.ByteaArray + for _, topic := range log.Topics { + topics = append(topics, topic.Bytes()) + } + return topics +} + func createHeaderCheckedPredicateSQLForMissingHeaders(boolColumns []string) string { var result bytes.Buffer result.WriteString(" (") diff --git a/libraries/shared/repository/repository_test.go b/libraries/shared/repository/repository_test.go index 183a940f..9eb33464 100644 --- a/libraries/shared/repository/repository_test.go +++ b/libraries/shared/repository/repository_test.go @@ -18,6 +18,10 @@ package repository_test import ( "fmt" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/types" + "github.com/lib/pq" + "github.com/vulcanize/vulcanizedb/libraries/shared/test_data" "math/rand" "strconv" @@ -291,6 +295,154 @@ var _ = Describe("Repository", func() { }) }) }) + + Describe("CreateHeaderSyncLogs", func() { + var headerID int64 + + type HeaderSyncLog struct { + ID int64 + HeaderID int64 `db:"header_id"` + Address string + Topics pq.ByteaArray + Data []byte + BlockNumber uint64 `db:"block_number"` + BlockHash string `db:"block_hash"` + TxHash string `db:"tx_hash"` + TxIndex uint `db:"tx_index"` + LogIndex uint `db:"log_index"` + Transformed bool + Raw []byte + } + + BeforeEach(func() { + db = test_config.NewTestDB(test_config.NewTestNode()) + test_config.CleanTestDB(db) + headerRepository := repositories.NewHeaderRepository(db) + var headerErr error + headerID, headerErr = headerRepository.CreateOrUpdateHeader(fakes.FakeHeader) + Expect(headerErr).NotTo(HaveOccurred()) + }) + + It("writes a log to the db", func() { + log := test_data.GenericTestLog() + + _, err := shared.CreateLogs(headerID, []types.Log{log}, db) + + 
Expect(err).NotTo(HaveOccurred()) + var dbLog HeaderSyncLog + lookupErr := db.Get(&dbLog, `SELECT * FROM header_sync_logs`) + Expect(lookupErr).NotTo(HaveOccurred()) + Expect(dbLog.ID).NotTo(BeZero()) + Expect(dbLog.HeaderID).To(Equal(headerID)) + Expect(dbLog.Address).To(Equal(log.Address.Hex())) + Expect(dbLog.Topics[0]).To(Equal(log.Topics[0].Bytes())) + Expect(dbLog.Topics[1]).To(Equal(log.Topics[1].Bytes())) + Expect(dbLog.Data).To(Equal(log.Data)) + Expect(dbLog.BlockNumber).To(Equal(log.BlockNumber)) + Expect(dbLog.BlockHash).To(Equal(log.BlockHash.Hex())) + Expect(dbLog.TxIndex).To(Equal(log.TxIndex)) + Expect(dbLog.TxHash).To(Equal(log.TxHash.Hex())) + Expect(dbLog.LogIndex).To(Equal(log.Index)) + expectedRaw, jsonErr := log.MarshalJSON() + Expect(jsonErr).NotTo(HaveOccurred()) + Expect(dbLog.Raw).To(MatchJSON(expectedRaw)) + Expect(dbLog.Transformed).To(BeFalse()) + }) + + It("writes several logs to the db", func() { + log1 := test_data.GenericTestLog() + log2 := test_data.GenericTestLog() + logs := []types.Log{log1, log2} + + _, err := shared.CreateLogs(headerID, logs, db) + + Expect(err).NotTo(HaveOccurred()) + var count int + lookupErr := db.Get(&count, `SELECT COUNT(*) FROM header_sync_logs`) + Expect(lookupErr).NotTo(HaveOccurred()) + Expect(count).To(Equal(len(logs))) + }) + + It("persists record that can be unpacked into types.Log", func() { + // important if we want to decouple log persistence from transforming and still make use of + // tools on types.Log like abi.Unpack + + log := test_data.GenericTestLog() + + _, err := shared.CreateLogs(headerID, []types.Log{log}, db) + + Expect(err).NotTo(HaveOccurred()) + var dbLog HeaderSyncLog + lookupErr := db.Get(&dbLog, `SELECT * FROM header_sync_logs`) + Expect(lookupErr).NotTo(HaveOccurred()) + + var logTopics []common.Hash + for _, topic := range dbLog.Topics { + logTopics = append(logTopics, common.BytesToHash(topic)) + } + + reconstructedLog := types.Log{ + Address: 
common.HexToAddress(dbLog.Address), + Topics: logTopics, + Data: dbLog.Data, + BlockNumber: dbLog.BlockNumber, + TxHash: common.HexToHash(dbLog.TxHash), + TxIndex: dbLog.TxIndex, + BlockHash: common.HexToHash(dbLog.BlockHash), + Index: dbLog.LogIndex, + Removed: false, + } + Expect(reconstructedLog).To(Equal(log)) + }) + + It("does not duplicate logs", func() { + log := test_data.GenericTestLog() + + results, err := shared.CreateLogs(headerID, []types.Log{log, log}, db) + + Expect(err).NotTo(HaveOccurred()) + Expect(len(results)).To(Equal(1)) + var count int + lookupErr := db.Get(&count, `SELECT COUNT(*) FROM header_sync_logs`) + Expect(lookupErr).NotTo(HaveOccurred()) + Expect(count).To(Equal(1)) + }) + + It("returns results with log id and header id for persisted logs", func() { + log1 := test_data.GenericTestLog() + log2 := test_data.GenericTestLog() + logs := []types.Log{log1, log2} + + results, err := shared.CreateLogs(headerID, logs, db) + + Expect(err).NotTo(HaveOccurred()) + Expect(len(results)).To(Equal(len(logs))) + var log1ID, log2ID int64 + lookupErr := db.Get(&log1ID, `SELECT id FROM header_sync_logs WHERE data = $1`, log1.Data) + Expect(lookupErr).NotTo(HaveOccurred()) + lookup2Err := db.Get(&log2ID, `SELECT id FROM header_sync_logs WHERE data = $1`, log2.Data) + Expect(lookup2Err).NotTo(HaveOccurred()) + Expect(results[0].ID).To(Or(Equal(log1ID), Equal(log2ID))) + Expect(results[1].ID).To(Or(Equal(log1ID), Equal(log2ID))) + Expect(results[0].HeaderID).To(Equal(headerID)) + Expect(results[1].HeaderID).To(Equal(headerID)) + }) + + It("returns results with properties for persisted logs", func() { + log1 := test_data.GenericTestLog() + log2 := test_data.GenericTestLog() + logs := []types.Log{log1, log2} + + results, err := shared.CreateLogs(headerID, logs, db) + + Expect(err).NotTo(HaveOccurred()) + Expect(len(results)).To(Equal(len(logs))) + Expect(results[0].Log).To(Or(Equal(log1), Equal(log2))) + Expect(results[1].Log).To(Or(Equal(log1), Equal(log2))) 
+ Expect(results[0].Transformed).To(BeFalse()) + Expect(results[1].Transformed).To(BeFalse()) + }) + }) }) func getExpectedColumnNames() []string { diff --git a/libraries/shared/test_data/generic.go b/libraries/shared/test_data/generic.go index eb68f03a..f00b51a0 100644 --- a/libraries/shared/test_data/generic.go +++ b/libraries/shared/test_data/generic.go @@ -17,6 +17,7 @@ package test_data import ( + "github.com/ethereum/go-ethereum/common/hexutil" "math/rand" "time" @@ -30,28 +31,42 @@ type GenericModel struct{} type GenericEntity struct{} var startingBlockNumber = rand.Int63() -var topic = "0x" + randomString(64) -var address = "0x" + randomString(38) +var topic0 = "0x" + randomString(64) -var GenericTestLogs = []types.Log{{ - Address: common.HexToAddress(address), - Topics: []common.Hash{common.HexToHash(topic)}, - BlockNumber: uint64(startingBlockNumber), -}} +var GenericTestLog = func() types.Log { + return types.Log{ + Address: fakeAddress(), + Topics: []common.Hash{common.HexToHash(topic0), fakeHash()}, + Data: hexutil.MustDecode(fakeHash().Hex()), + BlockNumber: uint64(startingBlockNumber), + TxHash: fakeHash(), + TxIndex: uint(rand.Int31()), + BlockHash: fakeHash(), + Index: uint(rand.Int31()), + } +} var GenericTestConfig = transformer.EventTransformerConfig{ TransformerName: "generic-test-transformer", - ContractAddresses: []string{address}, + ContractAddresses: []string{fakeAddress().Hex()}, ContractAbi: randomString(100), - Topic: topic, + Topic: topic0, StartingBlockNumber: startingBlockNumber, EndingBlockNumber: startingBlockNumber + 1, } +func fakeAddress() common.Address { + return common.HexToAddress("0x" + randomString(40)) +} + +func fakeHash() common.Hash { + return common.HexToHash("0x" + randomString(64)) +} + func randomString(length int) string { var seededRand = rand.New( rand.NewSource(time.Now().UnixNano())) - charset := "abcdefghijklmnopqrstuvwxyz1234567890" + charset := "abcdef1234567890" b := make([]byte, length) for i := range b { 
b[i] = charset[seededRand.Intn(len(charset))] diff --git a/libraries/shared/transformer/event_transformer.go b/libraries/shared/transformer/event_transformer.go index 1bbb8b54..9e888ccf 100644 --- a/libraries/shared/transformer/event_transformer.go +++ b/libraries/shared/transformer/event_transformer.go @@ -18,13 +18,13 @@ package transformer import ( "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" + "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" ) type EventTransformer interface { - Execute(logs []types.Log, headerID int64) error + Execute(logs []core.HeaderSyncLog, headerID int64) error GetConfig() EventTransformerConfig } diff --git a/libraries/shared/watcher/event_watcher.go b/libraries/shared/watcher/event_watcher.go index 6c78484b..5d8f3308 100644 --- a/libraries/shared/watcher/event_watcher.go +++ b/libraries/shared/watcher/event_watcher.go @@ -21,7 +21,6 @@ import ( "github.com/vulcanize/vulcanizedb/libraries/shared/transactions" "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" "github.com/sirupsen/logrus" "github.com/vulcanize/vulcanizedb/libraries/shared/chunker" @@ -122,7 +121,12 @@ func (watcher *EventWatcher) Execute(recheckHeaders constants.TransformerExecuti return transactionsSyncErr } - transformErr := watcher.transformLogs(logs, header.Id) + persistedLogs, createLogsErr := repository.CreateLogs(header.Id, logs, watcher.DB) + if createLogsErr != nil { + logrus.Errorf("error persisting logs: %s", createLogsErr.Error()) + } + + transformErr := watcher.transformLogs(persistedLogs, header.Id) if transformErr != nil { logrus.Error("Could not transform logs: ", transformErr) return transformErr @@ -131,7 +135,7 @@ func (watcher *EventWatcher) Execute(recheckHeaders constants.TransformerExecuti return err } -func (watcher *EventWatcher) transformLogs(logs []types.Log, headerID int64) error { +func (watcher *EventWatcher) 
transformLogs(logs []core.HeaderSyncLog, headerID int64) error { chunkedLogs := watcher.Chunker.ChunkLogs(logs) // Can't quit early and mark as checked if there are no logs. If we are running continuousLogSync, diff --git a/libraries/shared/watcher/event_watcher_test.go b/libraries/shared/watcher/event_watcher_test.go index 6edd168d..b3e606c1 100644 --- a/libraries/shared/watcher/event_watcher_test.go +++ b/libraries/shared/watcher/event_watcher_test.go @@ -18,7 +18,6 @@ package watcher_test import ( "errors" - "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core/types" @@ -106,7 +105,6 @@ var _ = Describe("Watcher", func() { w watcher.EventWatcher mockBlockChain fakes.MockBlockChain headerRepository repositories.HeaderRepository - repository mocks.MockWatcherRepository ) BeforeEach(func() { @@ -117,14 +115,12 @@ var _ = Describe("Watcher", func() { _, err := headerRepository.CreateOrUpdateHeader(fakes.FakeHeader) Expect(err).NotTo(HaveOccurred()) - repository = mocks.MockWatcherRepository{} w = watcher.NewEventWatcher(db, &mockBlockChain) }) It("syncs transactions for fetched logs", func() { fakeTransformer := &mocks.MockTransformer{} w.AddTransformers([]transformer.EventTransformerInitializer{fakeTransformer.FakeTransformerInitializer}) - repository.SetMissingHeaders([]core.Header{fakes.FakeHeader}) mockTransactionSyncer := &fakes.MockTransactionSyncer{} w.Syncer = mockTransactionSyncer @@ -137,7 +133,6 @@ var _ = Describe("Watcher", func() { It("returns error if syncing transactions fails", func() { fakeTransformer := &mocks.MockTransformer{} w.AddTransformers([]transformer.EventTransformerInitializer{fakeTransformer.FakeTransformerInitializer}) - repository.SetMissingHeaders([]core.Header{fakes.FakeHeader}) mockTransactionSyncer := &fakes.MockTransactionSyncer{} mockTransactionSyncer.SyncTransactionsError = fakes.FakeError w.Syncer = mockTransactionSyncer @@ -148,10 +143,30 @@ var _ = 
Describe("Watcher", func() { Expect(err).To(MatchError(fakes.FakeError)) }) + It("persists fetched logs", func() { + fakeTransformer := &mocks.MockTransformer{} + transformerConfig := transformer.EventTransformerConfig{TransformerName: "transformerA", + ContractAddresses: []string{"0x000000000000000000000000000000000000000A"}, + Topic: "0xA"} + fakeTransformer.SetTransformerConfig(transformerConfig) + w.AddTransformers([]transformer.EventTransformerInitializer{fakeTransformer.FakeTransformerInitializer}) + log := types.Log{Address: common.HexToAddress("0xA"), + Topics: []common.Hash{common.HexToHash("0xA")}, + Index: 0, + } + mockBlockChain.SetGetEthLogsWithCustomQueryReturnLogs([]types.Log{log}) + + err := w.Execute(constants.HeaderMissing) + + Expect(err).NotTo(HaveOccurred()) + Expect(len(fakeTransformer.PassedLogs)).NotTo(BeZero()) + Expect(fakeTransformer.PassedLogs[0].ID).NotTo(BeZero()) + Expect(fakeTransformer.PassedLogs[0].Log).To(Equal(log)) + }) + It("executes each transformer", func() { fakeTransformer := &mocks.MockTransformer{} w.AddTransformers([]transformer.EventTransformerInitializer{fakeTransformer.FakeTransformerInitializer}) - repository.SetMissingHeaders([]core.Header{fakes.FakeHeader}) err := w.Execute(constants.HeaderMissing) Expect(err).NotTo(HaveOccurred()) @@ -161,7 +176,6 @@ var _ = Describe("Watcher", func() { It("returns an error if transformer returns an error", func() { fakeTransformer := &mocks.MockTransformer{ExecuteError: errors.New("Something bad happened")} w.AddTransformers([]transformer.EventTransformerInitializer{fakeTransformer.FakeTransformerInitializer}) - repository.SetMissingHeaders([]core.Header{fakes.FakeHeader}) err := w.Execute(constants.HeaderMissing) Expect(err).To(HaveOccurred()) @@ -183,26 +197,30 @@ var _ = Describe("Watcher", func() { transformerB.SetTransformerConfig(configB) logA := types.Log{Address: common.HexToAddress("0xA"), - Topics: []common.Hash{common.HexToHash("0xA")}} + Topics: 
[]common.Hash{common.HexToHash("0xA")}, + Index: 0, + } logB := types.Log{Address: common.HexToAddress("0xB"), - Topics: []common.Hash{common.HexToHash("0xB")}} + Topics: []common.Hash{common.HexToHash("0xB")}, + Index: 1, + } mockBlockChain.SetGetEthLogsWithCustomQueryReturnLogs([]types.Log{logA, logB}) - repository.SetMissingHeaders([]core.Header{fakes.FakeHeader}) w = watcher.NewEventWatcher(db, &mockBlockChain) w.AddTransformers([]transformer.EventTransformerInitializer{ transformerA.FakeTransformerInitializer, transformerB.FakeTransformerInitializer}) err := w.Execute(constants.HeaderMissing) Expect(err).NotTo(HaveOccurred()) - Expect(transformerA.PassedLogs).To(Equal([]types.Log{logA})) - Expect(transformerB.PassedLogs).To(Equal([]types.Log{logB})) + Expect(len(transformerA.PassedLogs)).NotTo(BeZero()) + Expect(transformerA.PassedLogs[0].Log).To(Equal(logA)) + Expect(len(transformerB.PassedLogs)).NotTo(BeZero()) + Expect(transformerB.PassedLogs[0].Log).To(Equal(logB)) }) Describe("uses the LogFetcher correctly:", func() { var fakeTransformer mocks.MockTransformer BeforeEach(func() { - repository.SetMissingHeaders([]core.Header{fakes.FakeHeader}) fakeTransformer = mocks.MockTransformer{} }) diff --git a/pkg/core/log.go b/pkg/core/log.go index 16962055..d1d576d6 100644 --- a/pkg/core/log.go +++ b/pkg/core/log.go @@ -16,6 +16,8 @@ package core +import "github.com/ethereum/go-ethereum/core/types" + type Log struct { BlockNumber int64 TxHash string @@ -24,3 +26,10 @@ type Log struct { Index int64 Data string } + +type HeaderSyncLog struct { + ID int64 + HeaderID int64 `db:"header_id"` + Log types.Log + Transformed bool +} diff --git a/test_config/test_config.go b/test_config/test_config.go index b8f62b72..0d3ee032 100644 --- a/test_config/test_config.go +++ b/test_config/test_config.go @@ -113,6 +113,7 @@ func CleanTestDB(db *postgres.DB) { db.MustExec("DELETE FROM full_sync_receipts") db.MustExec("DELETE FROM full_sync_transactions") db.MustExec("DELETE FROM 
goose_db_version") + db.MustExec("DELETE FROM header_sync_logs") db.MustExec("DELETE FROM header_sync_receipts") db.MustExec("DELETE FROM header_sync_transactions") db.MustExec("DELETE FROM headers") From d496dad33c3cdb1260f3df619fc024604257d8db Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Tue, 6 Aug 2019 16:57:08 -0500 Subject: [PATCH 04/21] Decouple log extraction from transformer delegation - limit missing headers results set to 100 so that extraction doesn't excessively block delegation - wrap checked headers functions in repository struct - move storage repository to factory, to correspond with event repository path - remove unused files - reformat sql - remove line breaks in imports --- db/migrations/00001_create_blocks_table.sql | 35 +- ...02_create_full_sync_transactions_table.sql | 27 +- .../00004_create_contracts_table.sql | 6 +- db/migrations/00005_create_nodes_table.sql | 15 +- db/migrations/00006_add_node_fk_to_blocks.sql | 12 +- .../00007_create_full_sync_logs_table.sql | 26 +- ...ock_number_unique_constraint_to_blocks.sql | 4 +- ..._tx_to_index_to_full_sync_transactions.sql | 2 +- ...x_from_index_to_full_sync_transactions.sql | 2 +- .../00016_add_receipts_fk_to_logs.sql | 16 +- db/migrations/00017_create_log_filters.sql | 23 +- .../00018_create_watched_event_logs.sql | 50 ++- ...update_log_filters_to_block_constraint.sql | 8 +- db/migrations/00020_rename_node_table.sql | 53 ++- .../00021_associate_receipts_with_blocks.sql | 36 +- ...022_add_eth_node_fingerprint_to_blocks.sql | 10 +- db/migrations/00023_create_headers_table.sql | 17 +- .../00024_create_checked_headers_table.sql | 8 +- db/migrations/00025_create_queued_storage.sql | 17 +- ..._create_header_sync_transactions_table.sql | 29 +- db/migrations/00028_create_uncles_table.sql | 23 +- .../00029_create_header_sync_logs_table.sql | 1 + db/schema.sql | 6 +- integration_test/contract_test.go | 6 +- libraries/shared/chunker/log_chunker.go | 19 +- libraries/shared/chunker/log_chunker_test.go | 
18 +- libraries/shared/constants/checked_headers.go | 2 +- .../shared/factories/event/repository.go | 7 +- .../shared/factories/event/transformer.go | 16 +- .../factories/event/transformer_test.go | 46 +- .../storage/repository.go} | 4 +- .../shared/factories/storage/transformer.go | 3 +- libraries/shared/fetcher/log_fetcher.go | 1 + libraries/shared/logs/delegator.go | 78 ++++ libraries/shared/logs/delegator_test.go | 134 ++++++ libraries/shared/logs/extractor.go | 122 ++++++ libraries/shared/logs/extractor_test.go | 310 +++++++++++++ libraries/shared/logs/logs_suite_test.go | 35 ++ .../{converter.go => event_converter.go} | 0 .../{repository.go => event_repository.go} | 32 +- .../{transformer.go => event_transformer.go} | 31 +- libraries/shared/mocks/log_delegator.go | 36 ++ libraries/shared/mocks/log_extractor.go | 37 ++ .../{log_note_converter.go => log_fetcher.go} | 32 +- .../{mappings.go => storage_mappings.go} | 0 libraries/shared/repository/repository.go | 186 +------- .../shared/repository/repository_test.go | 408 +----------------- libraries/shared/test_data/generic.go | 8 +- .../shared/transformer/event_transformer.go | 3 +- libraries/shared/watcher/event_watcher.go | 142 ++---- .../shared/watcher/event_watcher_test.go | 256 +++-------- .../full/retriever/block_retriever_test.go | 8 +- .../shared/helpers/test_helpers/database.go | 18 +- .../helpers/test_helpers/mocks/entities.go | 8 +- .../shared/repository/event_repository.go | 2 +- .../repository/event_repository_test.go | 4 +- pkg/core/blockchain.go | 2 +- pkg/core/log.go | 2 +- pkg/core/receipts.go | 2 +- pkg/datastore/postgres/postgres_test.go | 6 +- .../postgres/repositories/block_repository.go | 22 +- .../checked_headers_repository.go | 70 +++ .../checked_headers_repository_test.go | 270 ++++++++++++ ...ository.go => full_sync_log_repository.go} | 26 +- ...st.go => full_sync_log_repository_test.go} | 16 +- .../full_sync_receipt_repository.go | 18 +- .../full_sync_receipt_repository_test.go | 10 +- 
.../header_sync_log_repository.go | 134 ++++++ .../header_sync_log_repository_test.go | 203 +++++++++ .../watched_events_repository_test.go | 8 +- pkg/datastore/repository.go | 17 +- pkg/fakes/mock_blockchain.go | 4 +- pkg/fakes/mock_checked_headers_repository.go | 43 ++ pkg/fakes/mock_header_sync_log_repository.go | 42 ++ pkg/geth/blockchain.go | 6 +- pkg/geth/blockchain_test.go | 4 +- ...onverter.go => full_sync_log_converter.go} | 8 +- ...est.go => full_sync_log_converter_test.go} | 6 +- .../converters/common/receipt_converter.go | 4 +- .../common/receipt_converter_test.go | 4 +- test_config/test_config.go | 12 +- 81 files changed, 2049 insertions(+), 1328 deletions(-) rename libraries/shared/{repository/storage_repository.go => factories/storage/repository.go} (94%) create mode 100644 libraries/shared/logs/delegator.go create mode 100644 libraries/shared/logs/delegator_test.go create mode 100644 libraries/shared/logs/extractor.go create mode 100644 libraries/shared/logs/extractor_test.go create mode 100644 libraries/shared/logs/logs_suite_test.go rename libraries/shared/mocks/{converter.go => event_converter.go} (100%) rename libraries/shared/mocks/{repository.go => event_repository.go} (63%) rename libraries/shared/mocks/{transformer.go => event_transformer.go} (63%) create mode 100644 libraries/shared/mocks/log_delegator.go create mode 100644 libraries/shared/mocks/log_extractor.go rename libraries/shared/mocks/{log_note_converter.go => log_fetcher.go} (55%) rename libraries/shared/mocks/{mappings.go => storage_mappings.go} (100%) create mode 100644 pkg/datastore/postgres/repositories/checked_headers_repository.go create mode 100644 pkg/datastore/postgres/repositories/checked_headers_repository_test.go rename pkg/datastore/postgres/repositories/{logs_repository.go => full_sync_log_repository.go} (81%) rename pkg/datastore/postgres/repositories/{logs_repository_test.go => full_sync_log_repository_test.go} (95%) create mode 100644 
pkg/datastore/postgres/repositories/header_sync_log_repository.go create mode 100644 pkg/datastore/postgres/repositories/header_sync_log_repository_test.go create mode 100644 pkg/fakes/mock_checked_headers_repository.go create mode 100644 pkg/fakes/mock_header_sync_log_repository.go rename pkg/geth/converters/common/{log_converter.go => full_sync_log_converter.go} (89%) rename pkg/geth/converters/common/{log_converter_test.go => full_sync_log_converter_test.go} (95%) diff --git a/db/migrations/00001_create_blocks_table.sql b/db/migrations/00001_create_blocks_table.sql index c11cf1de..fb85c820 100644 --- a/db/migrations/00001_create_blocks_table.sql +++ b/db/migrations/00001_create_blocks_table.sql @@ -1,21 +1,22 @@ -- +goose Up -CREATE TABLE public.blocks ( - id SERIAL PRIMARY KEY, - difficulty BIGINT, - extra_data VARCHAR, - gas_limit BIGINT, - gas_used BIGINT, - hash VARCHAR(66), - miner VARCHAR(42), - nonce VARCHAR(20), - "number" BIGINT, - parent_hash VARCHAR(66), - reward NUMERIC, - uncles_reward NUMERIC, - "size" VARCHAR, - "time" BIGINT, - is_final BOOLEAN, - uncle_hash VARCHAR(66) +CREATE TABLE public.blocks +( + id SERIAL PRIMARY KEY, + difficulty BIGINT, + extra_data VARCHAR, + gas_limit BIGINT, + gas_used BIGINT, + hash VARCHAR(66), + miner VARCHAR(42), + nonce VARCHAR(20), + "number" BIGINT, + parent_hash VARCHAR(66), + reward NUMERIC, + uncles_reward NUMERIC, + "size" VARCHAR, + "time" BIGINT, + is_final BOOLEAN, + uncle_hash VARCHAR(66) ); diff --git a/db/migrations/00002_create_full_sync_transactions_table.sql b/db/migrations/00002_create_full_sync_transactions_table.sql index 72548c95..84c3d433 100644 --- a/db/migrations/00002_create_full_sync_transactions_table.sql +++ b/db/migrations/00002_create_full_sync_transactions_table.sql @@ -1,17 +1,18 @@ -- +goose Up -CREATE TABLE full_sync_transactions ( - id SERIAL PRIMARY KEY, - block_id INTEGER NOT NULL REFERENCES blocks(id) ON DELETE CASCADE, - gas_limit NUMERIC, - gas_price NUMERIC, - hash 
VARCHAR(66), - input_data BYTEA, - nonce NUMERIC, - raw BYTEA, - tx_from VARCHAR(66), - tx_index INTEGER, - tx_to VARCHAR(66), - "value" NUMERIC +CREATE TABLE full_sync_transactions +( + id SERIAL PRIMARY KEY, + block_id INTEGER NOT NULL REFERENCES blocks (id) ON DELETE CASCADE, + gas_limit NUMERIC, + gas_price NUMERIC, + hash VARCHAR(66), + input_data BYTEA, + nonce NUMERIC, + raw BYTEA, + tx_from VARCHAR(66), + tx_index INTEGER, + tx_to VARCHAR(66), + "value" NUMERIC ); -- +goose Down diff --git a/db/migrations/00004_create_contracts_table.sql b/db/migrations/00004_create_contracts_table.sql index 3ff43bef..9d288a79 100644 --- a/db/migrations/00004_create_contracts_table.sql +++ b/db/migrations/00004_create_contracts_table.sql @@ -1,9 +1,9 @@ -- +goose Up CREATE TABLE watched_contracts ( - contract_id SERIAL PRIMARY KEY, - contract_abi json, - contract_hash VARCHAR(66) UNIQUE + contract_id SERIAL PRIMARY KEY, + contract_abi json, + contract_hash VARCHAR(66) UNIQUE ); -- +goose Down diff --git a/db/migrations/00005_create_nodes_table.sql b/db/migrations/00005_create_nodes_table.sql index c2c0c7b8..4a38a934 100644 --- a/db/migrations/00005_create_nodes_table.sql +++ b/db/migrations/00005_create_nodes_table.sql @@ -1,11 +1,12 @@ -- +goose Up -CREATE TABLE nodes ( - id SERIAL PRIMARY KEY, - client_name VARCHAR, - genesis_block VARCHAR(66), - network_id NUMERIC, - node_id VARCHAR(128), - CONSTRAINT node_uc UNIQUE (genesis_block, network_id, node_id) +CREATE TABLE nodes +( + id SERIAL PRIMARY KEY, + client_name VARCHAR, + genesis_block VARCHAR(66), + network_id NUMERIC, + node_id VARCHAR(128), + CONSTRAINT node_uc UNIQUE (genesis_block, network_id, node_id) ); -- +goose Down diff --git a/db/migrations/00006_add_node_fk_to_blocks.sql b/db/migrations/00006_add_node_fk_to_blocks.sql index 4db8c03f..dc8e0545 100644 --- a/db/migrations/00006_add_node_fk_to_blocks.sql +++ b/db/migrations/00006_add_node_fk_to_blocks.sql @@ -1,11 +1,11 @@ -- +goose Up ALTER TABLE blocks - ADD 
COLUMN node_id INTEGER NOT NULL, - ADD CONSTRAINT node_fk -FOREIGN KEY (node_id) -REFERENCES nodes (id) -ON DELETE CASCADE; + ADD COLUMN node_id INTEGER NOT NULL, + ADD CONSTRAINT node_fk + FOREIGN KEY (node_id) + REFERENCES nodes (id) + ON DELETE CASCADE; -- +goose Down ALTER TABLE blocks - DROP COLUMN node_id; + DROP COLUMN node_id; diff --git a/db/migrations/00007_create_full_sync_logs_table.sql b/db/migrations/00007_create_full_sync_logs_table.sql index 985bb3cc..b2dc154d 100644 --- a/db/migrations/00007_create_full_sync_logs_table.sql +++ b/db/migrations/00007_create_full_sync_logs_table.sql @@ -1,19 +1,19 @@ -- +goose Up -CREATE TABLE full_sync_logs ( - id SERIAL PRIMARY KEY, - block_number BIGINT, - address VARCHAR(66), - tx_hash VARCHAR(66), - index BIGINT, - topic0 VARCHAR(66), - topic1 VARCHAR(66), - topic2 VARCHAR(66), - topic3 VARCHAR(66), - data TEXT, - CONSTRAINT full_sync_log_uc UNIQUE (block_number, index) +CREATE TABLE full_sync_logs +( + id SERIAL PRIMARY KEY, + block_number BIGINT, + address VARCHAR(66), + tx_hash VARCHAR(66), + index BIGINT, + topic0 VARCHAR(66), + topic1 VARCHAR(66), + topic2 VARCHAR(66), + topic3 VARCHAR(66), + data TEXT, + CONSTRAINT full_sync_log_uc UNIQUE (block_number, index) ); - -- +goose Down DROP TABLE full_sync_logs; diff --git a/db/migrations/00008_add_node_block_number_unique_constraint_to_blocks.sql b/db/migrations/00008_add_node_block_number_unique_constraint_to_blocks.sql index 30be138d..40bbbb7f 100644 --- a/db/migrations/00008_add_node_block_number_unique_constraint_to_blocks.sql +++ b/db/migrations/00008_add_node_block_number_unique_constraint_to_blocks.sql @@ -1,7 +1,7 @@ -- +goose Up ALTER TABLE blocks - ADD CONSTRAINT node_id_block_number_uc UNIQUE (number, node_id); + ADD CONSTRAINT node_id_block_number_uc UNIQUE (number, node_id); -- +goose Down ALTER TABLE blocks - DROP CONSTRAINT node_id_block_number_uc; + DROP CONSTRAINT node_id_block_number_uc; diff --git 
a/db/migrations/00011_add_tx_to_index_to_full_sync_transactions.sql b/db/migrations/00011_add_tx_to_index_to_full_sync_transactions.sql index cf2977d1..51d66c12 100644 --- a/db/migrations/00011_add_tx_to_index_to_full_sync_transactions.sql +++ b/db/migrations/00011_add_tx_to_index_to_full_sync_transactions.sql @@ -1,5 +1,5 @@ -- +goose Up -CREATE INDEX tx_to_index ON full_sync_transactions(tx_to); +CREATE INDEX tx_to_index ON full_sync_transactions (tx_to); -- +goose Down DROP INDEX tx_to_index; diff --git a/db/migrations/00012_add_tx_from_index_to_full_sync_transactions.sql b/db/migrations/00012_add_tx_from_index_to_full_sync_transactions.sql index fa6f0543..8db62a3d 100644 --- a/db/migrations/00012_add_tx_from_index_to_full_sync_transactions.sql +++ b/db/migrations/00012_add_tx_from_index_to_full_sync_transactions.sql @@ -1,5 +1,5 @@ -- +goose Up -CREATE INDEX tx_from_index ON full_sync_transactions(tx_from); +CREATE INDEX tx_from_index ON full_sync_transactions (tx_from); -- +goose Down DROP INDEX tx_from_index; diff --git a/db/migrations/00016_add_receipts_fk_to_logs.sql b/db/migrations/00016_add_receipts_fk_to_logs.sql index 336f911e..b2729597 100644 --- a/db/migrations/00016_add_receipts_fk_to_logs.sql +++ b/db/migrations/00016_add_receipts_fk_to_logs.sql @@ -3,21 +3,21 @@ ALTER TABLE full_sync_logs DROP CONSTRAINT full_sync_log_uc; ALTER TABLE full_sync_logs - ADD COLUMN receipt_id INT; + ADD COLUMN receipt_id INT; ALTER TABLE full_sync_logs - ADD CONSTRAINT receipts_fk -FOREIGN KEY (receipt_id) -REFERENCES full_sync_receipts (id) -ON DELETE CASCADE; + ADD CONSTRAINT receipts_fk + FOREIGN KEY (receipt_id) + REFERENCES full_sync_receipts (id) + ON DELETE CASCADE; -- +goose Down ALTER TABLE full_sync_logs - DROP CONSTRAINT receipts_fk; + DROP CONSTRAINT receipts_fk; ALTER TABLE full_sync_logs - DROP COLUMN receipt_id; + DROP COLUMN receipt_id; ALTER TABLE full_sync_logs - ADD CONSTRAINT full_sync_log_uc UNIQUE (block_number, index); + ADD CONSTRAINT 
full_sync_log_uc UNIQUE (block_number, index); diff --git a/db/migrations/00017_create_log_filters.sql b/db/migrations/00017_create_log_filters.sql index 0367f5e5..28772b01 100644 --- a/db/migrations/00017_create_log_filters.sql +++ b/db/migrations/00017_create_log_filters.sql @@ -1,15 +1,16 @@ -- +goose Up -CREATE TABLE log_filters ( - id SERIAL, - name VARCHAR NOT NULL CHECK (name <> ''), - from_block BIGINT CHECK (from_block >= 0), - to_block BIGINT CHECK (from_block >= 0), - address VARCHAR(66), - topic0 VARCHAR(66), - topic1 VARCHAR(66), - topic2 VARCHAR(66), - topic3 VARCHAR(66), - CONSTRAINT name_uc UNIQUE (name) +CREATE TABLE log_filters +( + id SERIAL, + name VARCHAR NOT NULL CHECK (name <> ''), + from_block BIGINT CHECK (from_block >= 0), + to_block BIGINT CHECK (from_block >= 0), + address VARCHAR(66), + topic0 VARCHAR(66), + topic1 VARCHAR(66), + topic2 VARCHAR(66), + topic3 VARCHAR(66), + CONSTRAINT name_uc UNIQUE (name) ); -- +goose Down diff --git a/db/migrations/00018_create_watched_event_logs.sql b/db/migrations/00018_create_watched_event_logs.sql index cdc1bffe..697c788d 100644 --- a/db/migrations/00018_create_watched_event_logs.sql +++ b/db/migrations/00018_create_watched_event_logs.sql @@ -1,33 +1,31 @@ -- +goose Up CREATE VIEW block_stats AS - SELECT - max(block_number) AS max_block, - min(block_number) AS min_block - FROM full_sync_logs; +SELECT max(block_number) AS max_block, + min(block_number) AS min_block +FROM full_sync_logs; CREATE VIEW watched_event_logs AS - SELECT - log_filters.name, - full_sync_logs.id, - block_number, - full_sync_logs.address, - tx_hash, - index, - full_sync_logs.topic0, - full_sync_logs.topic1, - full_sync_logs.topic2, - full_sync_logs.topic3, - data, - receipt_id - FROM log_filters - CROSS JOIN block_stats - JOIN full_sync_logs ON full_sync_logs.address = log_filters.address - AND full_sync_logs.block_number >= coalesce(log_filters.from_block, block_stats.min_block) - AND full_sync_logs.block_number <= 
coalesce(log_filters.to_block, block_stats.max_block) - WHERE (log_filters.topic0 = full_sync_logs.topic0 OR log_filters.topic0 ISNULL) - AND (log_filters.topic1 = full_sync_logs.topic1 OR log_filters.topic1 ISNULL) - AND (log_filters.topic2 = full_sync_logs.topic2 OR log_filters.topic2 ISNULL) - AND (log_filters.topic3 = full_sync_logs.topic3 OR log_filters.topic3 ISNULL); +SELECT log_filters.name, + full_sync_logs.id, + block_number, + full_sync_logs.address, + tx_hash, + index, + full_sync_logs.topic0, + full_sync_logs.topic1, + full_sync_logs.topic2, + full_sync_logs.topic3, + data, + receipt_id +FROM log_filters + CROSS JOIN block_stats + JOIN full_sync_logs ON full_sync_logs.address = log_filters.address + AND full_sync_logs.block_number >= coalesce(log_filters.from_block, block_stats.min_block) + AND full_sync_logs.block_number <= coalesce(log_filters.to_block, block_stats.max_block) +WHERE (log_filters.topic0 = full_sync_logs.topic0 OR log_filters.topic0 ISNULL) + AND (log_filters.topic1 = full_sync_logs.topic1 OR log_filters.topic1 ISNULL) + AND (log_filters.topic2 = full_sync_logs.topic2 OR log_filters.topic2 ISNULL) + AND (log_filters.topic3 = full_sync_logs.topic3 OR log_filters.topic3 ISNULL); -- +goose Down DROP VIEW watched_event_logs; diff --git a/db/migrations/00019_update_log_filters_to_block_constraint.sql b/db/migrations/00019_update_log_filters_to_block_constraint.sql index 512a44db..2d43618c 100644 --- a/db/migrations/00019_update_log_filters_to_block_constraint.sql +++ b/db/migrations/00019_update_log_filters_to_block_constraint.sql @@ -1,14 +1,14 @@ -- +goose Up ALTER TABLE log_filters - DROP CONSTRAINT log_filters_from_block_check1; + DROP CONSTRAINT log_filters_from_block_check1; ALTER TABLE log_filters - ADD CONSTRAINT log_filters_to_block_check CHECK (to_block >= 0); + ADD CONSTRAINT log_filters_to_block_check CHECK (to_block >= 0); -- +goose Down ALTER TABLE log_filters - DROP CONSTRAINT log_filters_to_block_check; + DROP CONSTRAINT 
log_filters_to_block_check; ALTER TABLE log_filters - ADD CONSTRAINT log_filters_from_block_check1 CHECK (to_block >= 0); + ADD CONSTRAINT log_filters_from_block_check1 CHECK (to_block >= 0); diff --git a/db/migrations/00020_rename_node_table.sql b/db/migrations/00020_rename_node_table.sql index 061fda57..c1592823 100644 --- a/db/migrations/00020_rename_node_table.sql +++ b/db/migrations/00020_rename_node_table.sql @@ -1,43 +1,52 @@ -- +goose Up -ALTER TABLE public.nodes RENAME TO eth_nodes; +ALTER TABLE public.nodes + RENAME TO eth_nodes; -ALTER TABLE public.eth_nodes RENAME COLUMN node_id TO eth_node_id; - -ALTER TABLE public.eth_nodes DROP CONSTRAINT node_uc; ALTER TABLE public.eth_nodes - ADD CONSTRAINT eth_node_uc UNIQUE (genesis_block, network_id, eth_node_id); + RENAME COLUMN node_id TO eth_node_id; -ALTER TABLE public.blocks RENAME COLUMN node_id TO eth_node_id; +ALTER TABLE public.eth_nodes + DROP CONSTRAINT node_uc; +ALTER TABLE public.eth_nodes + ADD CONSTRAINT eth_node_uc UNIQUE (genesis_block, network_id, eth_node_id); -ALTER TABLE public.blocks DROP CONSTRAINT node_id_block_number_uc; ALTER TABLE public.blocks - ADD CONSTRAINT eth_node_id_block_number_uc UNIQUE (number, eth_node_id); + RENAME COLUMN node_id TO eth_node_id; -ALTER TABLE public.blocks DROP CONSTRAINT node_fk; ALTER TABLE public.blocks - ADD CONSTRAINT node_fk -FOREIGN KEY (eth_node_id) REFERENCES eth_nodes (id) ON DELETE CASCADE; + DROP CONSTRAINT node_id_block_number_uc; +ALTER TABLE public.blocks + ADD CONSTRAINT eth_node_id_block_number_uc UNIQUE (number, eth_node_id); + +ALTER TABLE public.blocks + DROP CONSTRAINT node_fk; +ALTER TABLE public.blocks + ADD CONSTRAINT node_fk + FOREIGN KEY (eth_node_id) REFERENCES eth_nodes (id) ON DELETE CASCADE; -- +goose Down ALTER TABLE public.eth_nodes - RENAME TO nodes; + RENAME TO nodes; ALTER TABLE public.nodes - RENAME COLUMN eth_node_id TO node_id; + RENAME COLUMN eth_node_id TO node_id; ALTER TABLE public.nodes - DROP CONSTRAINT 
eth_node_uc; + DROP CONSTRAINT eth_node_uc; ALTER TABLE public.nodes - ADD CONSTRAINT node_uc UNIQUE (genesis_block, network_id, node_id); + ADD CONSTRAINT node_uc UNIQUE (genesis_block, network_id, node_id); -ALTER TABLE public.blocks RENAME COLUMN eth_node_id TO node_id; - -ALTER TABLE public.blocks DROP CONSTRAINT eth_node_id_block_number_uc; ALTER TABLE public.blocks - ADD CONSTRAINT node_id_block_number_uc UNIQUE (number, node_id); + RENAME COLUMN eth_node_id TO node_id; -ALTER TABLE public.blocks DROP CONSTRAINT node_fk; ALTER TABLE public.blocks - ADD CONSTRAINT node_fk -FOREIGN KEY (node_id) REFERENCES nodes (id) ON DELETE CASCADE; + DROP CONSTRAINT eth_node_id_block_number_uc; +ALTER TABLE public.blocks + ADD CONSTRAINT node_id_block_number_uc UNIQUE (number, node_id); + +ALTER TABLE public.blocks + DROP CONSTRAINT node_fk; +ALTER TABLE public.blocks + ADD CONSTRAINT node_fk + FOREIGN KEY (node_id) REFERENCES nodes (id) ON DELETE CASCADE; diff --git a/db/migrations/00021_associate_receipts_with_blocks.sql b/db/migrations/00021_associate_receipts_with_blocks.sql index b60aa2d4..d8d3934a 100644 --- a/db/migrations/00021_associate_receipts_with_blocks.sql +++ b/db/migrations/00021_associate_receipts_with_blocks.sql @@ -1,44 +1,44 @@ -- +goose Up ALTER TABLE full_sync_receipts - ADD COLUMN block_id INT; + ADD COLUMN block_id INT; UPDATE full_sync_receipts - SET block_id = ( +SET block_id = ( SELECT block_id FROM full_sync_transactions WHERE full_sync_transactions.id = full_sync_receipts.transaction_id - ); +); ALTER TABLE full_sync_receipts - ALTER COLUMN block_id SET NOT NULL; + ALTER COLUMN block_id SET NOT NULL; ALTER TABLE full_sync_receipts - ADD CONSTRAINT blocks_fk -FOREIGN KEY (block_id) -REFERENCES blocks (id) -ON DELETE CASCADE; + ADD CONSTRAINT blocks_fk + FOREIGN KEY (block_id) + REFERENCES blocks (id) + ON DELETE CASCADE; ALTER TABLE full_sync_receipts - DROP COLUMN transaction_id; + DROP COLUMN transaction_id; -- +goose Down ALTER TABLE 
full_sync_receipts - ADD COLUMN transaction_id INT; + ADD COLUMN transaction_id INT; CREATE INDEX transaction_id_index ON full_sync_receipts (transaction_id); UPDATE full_sync_receipts - SET transaction_id = ( +SET transaction_id = ( SELECT id FROM full_sync_transactions WHERE full_sync_transactions.hash = full_sync_receipts.tx_hash - ); +); ALTER TABLE full_sync_receipts - ALTER COLUMN transaction_id SET NOT NULL; + ALTER COLUMN transaction_id SET NOT NULL; ALTER TABLE full_sync_receipts - ADD CONSTRAINT transaction_fk -FOREIGN KEY (transaction_id) -REFERENCES full_sync_transactions (id) -ON DELETE CASCADE; + ADD CONSTRAINT transaction_fk + FOREIGN KEY (transaction_id) + REFERENCES full_sync_transactions (id) + ON DELETE CASCADE; ALTER TABLE full_sync_receipts - DROP COLUMN block_id; + DROP COLUMN block_id; diff --git a/db/migrations/00022_add_eth_node_fingerprint_to_blocks.sql b/db/migrations/00022_add_eth_node_fingerprint_to_blocks.sql index 5c48c03e..b295abc8 100644 --- a/db/migrations/00022_add_eth_node_fingerprint_to_blocks.sql +++ b/db/migrations/00022_add_eth_node_fingerprint_to_blocks.sql @@ -1,16 +1,16 @@ -- +goose Up ALTER TABLE blocks - ADD COLUMN eth_node_fingerprint VARCHAR(128); + ADD COLUMN eth_node_fingerprint VARCHAR(128); UPDATE blocks - SET eth_node_fingerprint = ( +SET eth_node_fingerprint = ( SELECT eth_node_id FROM eth_nodes WHERE eth_nodes.id = blocks.eth_node_id - ); +); ALTER TABLE blocks - ALTER COLUMN eth_node_fingerprint SET NOT NULL; + ALTER COLUMN eth_node_fingerprint SET NOT NULL; -- +goose Down ALTER TABLE blocks - DROP COLUMN eth_node_fingerprint; + DROP COLUMN eth_node_fingerprint; diff --git a/db/migrations/00023_create_headers_table.sql b/db/migrations/00023_create_headers_table.sql index 925c202b..2c8f30a1 100644 --- a/db/migrations/00023_create_headers_table.sql +++ b/db/migrations/00023_create_headers_table.sql @@ -1,12 +1,13 @@ -- +goose Up -CREATE TABLE public.headers ( - id SERIAL PRIMARY KEY, - hash VARCHAR(66), - 
block_number BIGINT, - raw JSONB, - block_timestamp NUMERIC, - eth_node_id INTEGER NOT NULL REFERENCES eth_nodes (id) ON DELETE CASCADE, - eth_node_fingerprint VARCHAR(128) +CREATE TABLE public.headers +( + id SERIAL PRIMARY KEY, + hash VARCHAR(66), + block_number BIGINT, + raw JSONB, + block_timestamp NUMERIC, + eth_node_id INTEGER NOT NULL REFERENCES eth_nodes (id) ON DELETE CASCADE, + eth_node_fingerprint VARCHAR(128) ); -- Index is removed when table is diff --git a/db/migrations/00024_create_checked_headers_table.sql b/db/migrations/00024_create_checked_headers_table.sql index acf0fbdb..95cedf22 100644 --- a/db/migrations/00024_create_checked_headers_table.sql +++ b/db/migrations/00024_create_checked_headers_table.sql @@ -1,7 +1,9 @@ -- +goose Up -CREATE TABLE public.checked_headers ( - id SERIAL PRIMARY KEY, - header_id INTEGER UNIQUE NOT NULL REFERENCES headers (id) ON DELETE CASCADE +CREATE TABLE public.checked_headers +( + id SERIAL PRIMARY KEY, + header_id INTEGER UNIQUE NOT NULL REFERENCES headers (id) ON DELETE CASCADE, + check_count INTEGER NOT NULL DEFAULT 1 ); -- +goose Down diff --git a/db/migrations/00025_create_queued_storage.sql b/db/migrations/00025_create_queued_storage.sql index 79a3a548..71ab8886 100644 --- a/db/migrations/00025_create_queued_storage.sql +++ b/db/migrations/00025_create_queued_storage.sql @@ -1,12 +1,13 @@ -- +goose Up -CREATE TABLE public.queued_storage ( - id SERIAL PRIMARY KEY, - block_height BIGINT, - block_hash BYTEA, - contract BYTEA, - storage_key BYTEA, - storage_value BYTEA, - UNIQUE (block_height, block_hash, contract, storage_key, storage_value) +CREATE TABLE public.queued_storage +( + id SERIAL PRIMARY KEY, + block_height BIGINT, + block_hash BYTEA, + contract BYTEA, + storage_key BYTEA, + storage_value BYTEA, + UNIQUE (block_height, block_hash, contract, storage_key, storage_value) ); -- +goose Down diff --git a/db/migrations/00026_create_header_sync_transactions_table.sql 
b/db/migrations/00026_create_header_sync_transactions_table.sql index 469f8f85..358ce65c 100644 --- a/db/migrations/00026_create_header_sync_transactions_table.sql +++ b/db/migrations/00026_create_header_sync_transactions_table.sql @@ -1,18 +1,19 @@ -- +goose Up -CREATE TABLE header_sync_transactions ( - id SERIAL PRIMARY KEY, - header_id INTEGER NOT NULL REFERENCES headers(id) ON DELETE CASCADE, - hash VARCHAR(66), - gas_limit NUMERIC, - gas_price NUMERIC, - input_data BYTEA, - nonce NUMERIC, - raw BYTEA, - tx_from VARCHAR(44), - tx_index INTEGER, - tx_to VARCHAR(44), - "value" NUMERIC, - UNIQUE (header_id, hash) +CREATE TABLE header_sync_transactions +( + id SERIAL PRIMARY KEY, + header_id INTEGER NOT NULL REFERENCES headers (id) ON DELETE CASCADE, + hash VARCHAR(66), + gas_limit NUMERIC, + gas_price NUMERIC, + input_data BYTEA, + nonce NUMERIC, + raw BYTEA, + tx_from VARCHAR(44), + tx_index INTEGER, + tx_to VARCHAR(44), + "value" NUMERIC, + UNIQUE (header_id, hash) ); -- +goose Down diff --git a/db/migrations/00028_create_uncles_table.sql b/db/migrations/00028_create_uncles_table.sql index 9ec0ffa7..703d50f2 100644 --- a/db/migrations/00028_create_uncles_table.sql +++ b/db/migrations/00028_create_uncles_table.sql @@ -1,15 +1,16 @@ -- +goose Up -CREATE TABLE public.uncles ( - id SERIAL PRIMARY KEY, - hash VARCHAR(66) NOT NULL, - block_id INTEGER NOT NULL REFERENCES blocks (id) ON DELETE CASCADE, - reward NUMERIC NOT NULL, - miner VARCHAR(42) NOT NULL, - raw JSONB, - block_timestamp NUMERIC, - eth_node_id INTEGER NOT NULL REFERENCES eth_nodes (id) ON DELETE CASCADE, - eth_node_fingerprint VARCHAR(128), - UNIQUE (block_id, hash) +CREATE TABLE public.uncles +( + id SERIAL PRIMARY KEY, + hash VARCHAR(66) NOT NULL, + block_id INTEGER NOT NULL REFERENCES blocks (id) ON DELETE CASCADE, + reward NUMERIC NOT NULL, + miner VARCHAR(42) NOT NULL, + raw JSONB, + block_timestamp NUMERIC, + eth_node_id INTEGER NOT NULL REFERENCES eth_nodes (id) ON DELETE CASCADE, + 
eth_node_fingerprint VARCHAR(128), + UNIQUE (block_id, hash) ); -- +goose Down diff --git a/db/migrations/00029_create_header_sync_logs_table.sql b/db/migrations/00029_create_header_sync_logs_table.sql index 1d4cd4dd..83313ca7 100644 --- a/db/migrations/00029_create_header_sync_logs_table.sql +++ b/db/migrations/00029_create_header_sync_logs_table.sql @@ -13,6 +13,7 @@ CREATE TABLE header_sync_logs tx_index INTEGER, log_index INTEGER, raw JSONB, + transformed BOOL NOT NULL DEFAULT FALSE, UNIQUE (header_id, tx_index, log_index) ); diff --git a/db/schema.sql b/db/schema.sql index a8f04eb3..6942d6a9 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -131,7 +131,8 @@ ALTER SEQUENCE public.blocks_id_seq OWNED BY public.blocks.id; CREATE TABLE public.checked_headers ( id integer NOT NULL, - header_id integer NOT NULL + header_id integer NOT NULL, + check_count integer DEFAULT 1 NOT NULL ); @@ -311,7 +312,8 @@ CREATE TABLE public.header_sync_logs ( tx_hash character varying(66), tx_index integer, log_index integer, - raw jsonb + raw jsonb, + transformed boolean DEFAULT false NOT NULL ); diff --git a/integration_test/contract_test.go b/integration_test/contract_test.go index 3d2dd866..b5d0b07f 100644 --- a/integration_test/contract_test.go +++ b/integration_test/contract_test.go @@ -38,7 +38,7 @@ var _ = Describe("Reading contracts", func() { Describe("Getting a contract attribute", func() { It("retrieves the event log for a specific block and contract", func() { - expectedLogZero := core.Log{ + expectedLogZero := core.FullSyncLog{ BlockNumber: 4703824, TxHash: "0xf896bfd1eb539d881a1a31102b78de9f25cd591bf1fe1924b86148c0b205fd5d", Address: "0xd26114cd6ee289accf82350c8d8487fedb8a0c07", @@ -59,7 +59,7 @@ var _ = Describe("Reading contracts", func() { blockChain := geth.NewBlockChain(blockChainClient, rpcClient, node, transactionConverter) contract := testing.SampleContract() - logs, err := blockChain.GetLogs(contract, big.NewInt(4703824), nil) + logs, err := 
blockChain.GetFullSyncLogs(contract, big.NewInt(4703824), nil) Expect(err).To(BeNil()) Expect(len(logs)).To(Equal(3)) @@ -76,7 +76,7 @@ var _ = Describe("Reading contracts", func() { transactionConverter := rpc2.NewRpcTransactionConverter(ethClient) blockChain := geth.NewBlockChain(blockChainClient, rpcClient, node, transactionConverter) - logs, err := blockChain.GetLogs(core.Contract{Hash: "0x123"}, big.NewInt(4703824), nil) + logs, err := blockChain.GetFullSyncLogs(core.Contract{Hash: "0x123"}, big.NewInt(4703824), nil) Expect(err).To(BeNil()) Expect(len(logs)).To(Equal(0)) diff --git a/libraries/shared/chunker/log_chunker.go b/libraries/shared/chunker/log_chunker.go index c3a780ba..6adceb42 100644 --- a/libraries/shared/chunker/log_chunker.go +++ b/libraries/shared/chunker/log_chunker.go @@ -17,15 +17,14 @@ package chunker import ( - "strings" - "github.com/ethereum/go-ethereum/common" "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" "github.com/vulcanize/vulcanizedb/pkg/core" + "strings" ) type Chunker interface { - AddConfigs(transformerConfigs []transformer.EventTransformerConfig) + AddConfig(transformerConfig transformer.EventTransformerConfig) ChunkLogs(logs []core.HeaderSyncLog) map[string][]core.HeaderSyncLog } @@ -43,14 +42,12 @@ func NewLogChunker() *LogChunker { } } -// Configures the chunker by adding more addreses and topics to consider. -func (chunker *LogChunker) AddConfigs(transformerConfigs []transformer.EventTransformerConfig) { - for _, config := range transformerConfigs { - for _, address := range config.ContractAddresses { - var lowerCaseAddress = strings.ToLower(address) - chunker.AddressToNames[lowerCaseAddress] = append(chunker.AddressToNames[lowerCaseAddress], config.TransformerName) - chunker.NameToTopic0[config.TransformerName] = common.HexToHash(config.Topic) - } +// Configures the chunker by adding one config with more addresses and topics to consider. 
+func (chunker *LogChunker) AddConfig(transformerConfig transformer.EventTransformerConfig) { + for _, address := range transformerConfig.ContractAddresses { + var lowerCaseAddress = strings.ToLower(address) + chunker.AddressToNames[lowerCaseAddress] = append(chunker.AddressToNames[lowerCaseAddress], transformerConfig.TransformerName) + chunker.NameToTopic0[transformerConfig.TransformerName] = common.HexToHash(transformerConfig.Topic) } } diff --git a/libraries/shared/chunker/log_chunker_test.go b/libraries/shared/chunker/log_chunker_test.go index 629c04b3..40bc78bc 100644 --- a/libraries/shared/chunker/log_chunker_test.go +++ b/libraries/shared/chunker/log_chunker_test.go @@ -21,7 +21,6 @@ import ( "github.com/ethereum/go-ethereum/core/types" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - chunk "github.com/vulcanize/vulcanizedb/libraries/shared/chunker" "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" "github.com/vulcanize/vulcanizedb/pkg/core" @@ -29,31 +28,32 @@ import ( var _ = Describe("Log chunker", func() { var ( - configs []transformer.EventTransformerConfig chunker *chunk.LogChunker ) BeforeEach(func() { + chunker = chunk.NewLogChunker() + configA := transformer.EventTransformerConfig{ TransformerName: "TransformerA", ContractAddresses: []string{"0x00000000000000000000000000000000000000A1", "0x00000000000000000000000000000000000000A2"}, Topic: "0xA", } + chunker.AddConfig(configA) + configB := transformer.EventTransformerConfig{ TransformerName: "TransformerB", ContractAddresses: []string{"0x00000000000000000000000000000000000000B1"}, Topic: "0xB", } + chunker.AddConfig(configB) configC := transformer.EventTransformerConfig{ TransformerName: "TransformerC", ContractAddresses: []string{"0x00000000000000000000000000000000000000A2"}, Topic: "0xC", } - - configs = []transformer.EventTransformerConfig{configA, configB, configC} - chunker = chunk.NewLogChunker() - chunker.AddConfigs(configs) + chunker.AddConfig(configC) }) 
Describe("initialisation", func() { @@ -72,14 +72,14 @@ var _ = Describe("Log chunker", func() { }) }) - Describe("AddConfigs", func() { + Describe("AddConfig", func() { It("can add more configs later", func() { configD := transformer.EventTransformerConfig{ TransformerName: "TransformerD", ContractAddresses: []string{"0x000000000000000000000000000000000000000D"}, Topic: "0xD", } - chunker.AddConfigs([]transformer.EventTransformerConfig{configD}) + chunker.AddConfig(configD) Expect(chunker.AddressToNames).To(ContainElement([]string{"TransformerD"})) Expect(chunker.NameToTopic0).To(ContainElement(common.HexToHash("0xD"))) @@ -91,7 +91,7 @@ var _ = Describe("Log chunker", func() { ContractAddresses: []string{"0x000000000000000000000000000000000000000D"}, Topic: "0xD", } - chunker.AddConfigs([]transformer.EventTransformerConfig{configD}) + chunker.AddConfig(configD) Expect(chunker.AddressToNames["0x000000000000000000000000000000000000000d"]).To(Equal([]string{"TransformerD"})) }) diff --git a/libraries/shared/constants/checked_headers.go b/libraries/shared/constants/checked_headers.go index c3fdbeb3..af207039 100644 --- a/libraries/shared/constants/checked_headers.go +++ b/libraries/shared/constants/checked_headers.go @@ -21,5 +21,5 @@ type TransformerExecution bool const ( HeaderRecheck TransformerExecution = true HeaderMissing TransformerExecution = false - RecheckHeaderCap = "4" + RecheckHeaderCap = int64(5) ) diff --git a/libraries/shared/factories/event/repository.go b/libraries/shared/factories/event/repository.go index 96188cbd..c537455b 100644 --- a/libraries/shared/factories/event/repository.go +++ b/libraries/shared/factories/event/repository.go @@ -16,12 +16,9 @@ package event -import ( - "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" -) +import "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" type Repository interface { - Create(headerID int64, models []interface{}) error - MarkHeaderChecked(headerID int64) error + Create(models 
[]interface{}) error SetDB(db *postgres.DB) } diff --git a/libraries/shared/factories/event/transformer.go b/libraries/shared/factories/event/transformer.go index 19c80eec..ee7b0a6b 100644 --- a/libraries/shared/factories/event/transformer.go +++ b/libraries/shared/factories/event/transformer.go @@ -18,7 +18,6 @@ package event import ( "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" @@ -35,34 +34,29 @@ func (transformer Transformer) NewTransformer(db *postgres.DB) transformer.Event return transformer } -func (transformer Transformer) Execute(logs []core.HeaderSyncLog, headerID int64) error { +func (transformer Transformer) Execute(logs []core.HeaderSyncLog) error { transformerName := transformer.Config.TransformerName config := transformer.Config if len(logs) < 1 { - err := transformer.Repository.MarkHeaderChecked(headerID) - if err != nil { - logrus.Printf("Error marking header as checked in %v: %v", transformerName, err) - return err - } return nil } entities, err := transformer.Converter.ToEntities(config.ContractAbi, logs) if err != nil { - logrus.Printf("Error converting logs to entities in %v: %v", transformerName, err) + logrus.Errorf("error converting logs to entities in %v: %v", transformerName, err) return err } models, err := transformer.Converter.ToModels(entities) if err != nil { - logrus.Printf("Error converting entities to models in %v: %v", transformerName, err) + logrus.Errorf("error converting entities to models in %v: %v", transformerName, err) return err } - err = transformer.Repository.Create(headerID, models) + err = transformer.Repository.Create(models) if err != nil { - logrus.Printf("Error persisting %v record: %v", transformerName, err) + logrus.Errorf("error persisting %v record: %v", transformerName, err) return err } diff --git a/libraries/shared/factories/event/transformer_test.go 
b/libraries/shared/factories/event/transformer_test.go index c13b0e0b..2306f251 100644 --- a/libraries/shared/factories/event/transformer_test.go +++ b/libraries/shared/factories/event/transformer_test.go @@ -17,22 +17,20 @@ package event_test import ( - "math/rand" - . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/factories/event" "github.com/vulcanize/vulcanizedb/libraries/shared/mocks" "github.com/vulcanize/vulcanizedb/libraries/shared/test_data" "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/fakes" + "math/rand" ) var _ = Describe("Transformer", func() { var ( - repository mocks.MockRepository + repository mocks.MockEventRepository converter mocks.MockConverter t transformer.EventTransformer headerOne core.Header @@ -41,7 +39,7 @@ var _ = Describe("Transformer", func() { ) BeforeEach(func() { - repository = mocks.MockRepository{} + repository = mocks.MockEventRepository{} converter = mocks.MockConverter{} t = event.Transformer{ @@ -64,15 +62,8 @@ var _ = Describe("Transformer", func() { Expect(repository.SetDbCalled).To(BeTrue()) }) - It("marks header checked if no logs returned", func() { - err := t.Execute([]core.HeaderSyncLog{}, headerOne.Id) - - Expect(err).NotTo(HaveOccurred()) - repository.AssertMarkHeaderCheckedCalledWith(headerOne.Id) - }) - It("doesn't attempt to convert or persist an empty collection when there are no logs", func() { - err := t.Execute([]core.HeaderSyncLog{}, headerOne.Id) + err := t.Execute([]core.HeaderSyncLog{}) Expect(err).NotTo(HaveOccurred()) Expect(converter.ToEntitiesCalledCounter).To(Equal(0)) @@ -80,24 +71,8 @@ var _ = Describe("Transformer", func() { Expect(repository.CreateCalledCounter).To(Equal(0)) }) - It("does not call repository.MarkCheckedHeader when there are logs", func() { - err := t.Execute(logs, headerOne.Id) - - Expect(err).NotTo(HaveOccurred()) - 
repository.AssertMarkHeaderCheckedNotCalled() - }) - - It("returns error if marking header checked returns err", func() { - repository.SetMarkHeaderCheckedError(fakes.FakeError) - - err := t.Execute([]core.HeaderSyncLog{}, headerOne.Id) - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) - It("converts an eth log to an entity", func() { - err := t.Execute(logs, headerOne.Id) + err := t.Execute(logs) Expect(err).NotTo(HaveOccurred()) Expect(converter.ContractAbi).To(Equal(config.ContractAbi)) @@ -107,7 +82,7 @@ var _ = Describe("Transformer", func() { It("returns an error if converter fails", func() { converter.ToEntitiesError = fakes.FakeError - err := t.Execute(logs, headerOne.Id) + err := t.Execute(logs) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) @@ -116,7 +91,7 @@ var _ = Describe("Transformer", func() { It("converts an entity to a model", func() { converter.EntitiesToReturn = []interface{}{test_data.GenericEntity{}} - err := t.Execute(logs, headerOne.Id) + err := t.Execute(logs) Expect(err).NotTo(HaveOccurred()) Expect(converter.EntitiesToConvert[0]).To(Equal(test_data.GenericEntity{})) @@ -126,7 +101,7 @@ var _ = Describe("Transformer", func() { converter.EntitiesToReturn = []interface{}{test_data.GenericEntity{}} converter.ToModelsError = fakes.FakeError - err := t.Execute(logs, headerOne.Id) + err := t.Execute(logs) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) @@ -135,16 +110,15 @@ var _ = Describe("Transformer", func() { It("persists the record", func() { converter.ModelsToReturn = []interface{}{test_data.GenericModel{}} - err := t.Execute(logs, headerOne.Id) + err := t.Execute(logs) Expect(err).NotTo(HaveOccurred()) - Expect(repository.PassedHeaderID).To(Equal(headerOne.Id)) Expect(repository.PassedModels[0]).To(Equal(test_data.GenericModel{})) }) It("returns error if persisting the record fails", func() { repository.SetCreateError(fakes.FakeError) - err := 
t.Execute(logs, headerOne.Id) + err := t.Execute(logs) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) diff --git a/libraries/shared/repository/storage_repository.go b/libraries/shared/factories/storage/repository.go similarity index 94% rename from libraries/shared/repository/storage_repository.go rename to libraries/shared/factories/storage/repository.go index f921d5aa..3146f3ba 100644 --- a/libraries/shared/repository/storage_repository.go +++ b/libraries/shared/factories/storage/repository.go @@ -14,14 +14,14 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see . -package repository +package storage import ( "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" ) -type StorageRepository interface { +type Repository interface { Create(blockNumber int, blockHash string, metadata utils.StorageValueMetadata, value interface{}) error SetDB(db *postgres.DB) } diff --git a/libraries/shared/factories/storage/transformer.go b/libraries/shared/factories/storage/transformer.go index fce04465..d7cf6275 100644 --- a/libraries/shared/factories/storage/transformer.go +++ b/libraries/shared/factories/storage/transformer.go @@ -19,7 +19,6 @@ package storage import ( "github.com/ethereum/go-ethereum/common" - "github.com/vulcanize/vulcanizedb/libraries/shared/repository" "github.com/vulcanize/vulcanizedb/libraries/shared/storage" "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils" "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" @@ -29,7 +28,7 @@ import ( type Transformer struct { Address common.Address Mappings storage.Mappings - Repository repository.StorageRepository + Repository Repository } func (transformer Transformer) NewTransformer(db *postgres.DB) transformer.StorageTransformer { diff --git a/libraries/shared/fetcher/log_fetcher.go b/libraries/shared/fetcher/log_fetcher.go index 
a2b5f21a..18c7aa0f 100644 --- a/libraries/shared/fetcher/log_fetcher.go +++ b/libraries/shared/fetcher/log_fetcher.go @@ -26,6 +26,7 @@ import ( type ILogFetcher interface { FetchLogs(contractAddresses []common.Address, topics []common.Hash, missingHeader core.Header) ([]types.Log, error) + // TODO Extend FetchLogs for doing several blocks at a time } type LogFetcher struct { diff --git a/libraries/shared/logs/delegator.go b/libraries/shared/logs/delegator.go new file mode 100644 index 00000000..67637a53 --- /dev/null +++ b/libraries/shared/logs/delegator.go @@ -0,0 +1,78 @@ +// VulcanizeDB +// Copyright © 2019 Vulcanize + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. + +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . 
+ +package logs + +import ( + "errors" + "github.com/sirupsen/logrus" + "github.com/vulcanize/vulcanizedb/libraries/shared/chunker" + "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" + "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore" +) + +var ErrNoTransformers = errors.New("no event transformers configured in the log delegator") + +type ILogDelegator interface { + AddTransformer(t transformer.EventTransformer) + DelegateLogs() error +} + +type LogDelegator struct { + Chunker chunker.Chunker + LogRepository datastore.HeaderSyncLogRepository + Transformers []transformer.EventTransformer +} + +func (delegator *LogDelegator) AddTransformer(t transformer.EventTransformer) { + delegator.Transformers = append(delegator.Transformers, t) + delegator.Chunker.AddConfig(t.GetConfig()) +} + +func (delegator LogDelegator) DelegateLogs() error { + if len(delegator.Transformers) < 1 { + return ErrNoTransformers + } + + persistedLogs, fetchErr := delegator.LogRepository.GetUntransformedHeaderSyncLogs() + if fetchErr != nil { + logrus.Errorf("error loading logs from db: %s", fetchErr.Error()) + return fetchErr + } + + transformErr := delegator.delegateLogs(persistedLogs) + if transformErr != nil { + logrus.Errorf("error transforming logs: %s", transformErr) + return transformErr + } + + return nil +} + +func (delegator *LogDelegator) delegateLogs(logs []core.HeaderSyncLog) error { + chunkedLogs := delegator.Chunker.ChunkLogs(logs) + for _, t := range delegator.Transformers { + transformerName := t.GetConfig().TransformerName + logChunk := chunkedLogs[transformerName] + err := t.Execute(logChunk) + if err != nil { + logrus.Errorf("%v transformer failed to execute in watcher: %v", transformerName, err) + return err + } + } + return nil +} diff --git a/libraries/shared/logs/delegator_test.go b/libraries/shared/logs/delegator_test.go new file mode 100644 index 00000000..15e59a9d --- /dev/null +++ 
b/libraries/shared/logs/delegator_test.go @@ -0,0 +1,134 @@ +// VulcanizeDB +// Copyright © 2019 Vulcanize + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. + +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . + +package logs_test + +import ( + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/types" + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + "github.com/vulcanize/vulcanizedb/libraries/shared/chunker" + "github.com/vulcanize/vulcanizedb/libraries/shared/logs" + "github.com/vulcanize/vulcanizedb/libraries/shared/mocks" + "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" + "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/fakes" + "strings" +) + +var _ = Describe("Log delegator", func() { + Describe("AddTransformer", func() { + It("adds transformers to the delegator", func() { + fakeTransformer := &mocks.MockEventTransformer{} + delegator := logs.LogDelegator{Chunker: chunker.NewLogChunker()} + + delegator.AddTransformer(fakeTransformer) + + Expect(delegator.Transformers).To(Equal([]transformer.EventTransformer{fakeTransformer})) + }) + + It("passes transformers' configs to the chunker", func() { + fakeTransformer := &mocks.MockEventTransformer{} + fakeConfig := mocks.FakeTransformerConfig + fakeTransformer.SetTransformerConfig(fakeConfig) + chunker := chunker.NewLogChunker() + delegator := logs.LogDelegator{Chunker: chunker} + + 
delegator.AddTransformer(fakeTransformer) + + expectedName := fakeConfig.TransformerName + expectedTopic := common.HexToHash(fakeConfig.Topic) + Expect(chunker.NameToTopic0).To(Equal(map[string]common.Hash{expectedName: expectedTopic})) + expectedAddress := strings.ToLower(fakeConfig.ContractAddresses[0]) + Expect(chunker.AddressToNames).To(Equal(map[string][]string{expectedAddress: {expectedName}})) + }) + }) + + Describe("DelegateLogs", func() { + It("returns an error if no transformers configured", func() { + delegator := newDelegator(&fakes.MockHeaderSyncLogRepository{}) + + err := delegator.DelegateLogs() + + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(logs.ErrNoTransformers)) + }) + + It("gets untransformed logs", func() { + mockLogRepository := &fakes.MockHeaderSyncLogRepository{} + delegator := newDelegator(mockLogRepository) + delegator.AddTransformer(&mocks.MockEventTransformer{}) + + err := delegator.DelegateLogs() + + Expect(err).NotTo(HaveOccurred()) + Expect(mockLogRepository.GetCalled).To(BeTrue()) + }) + + It("returns error if getting untransformed logs fails", func() { + mockLogRepository := &fakes.MockHeaderSyncLogRepository{} + mockLogRepository.GetError = fakes.FakeError + delegator := newDelegator(mockLogRepository) + delegator.AddTransformer(&mocks.MockEventTransformer{}) + + err := delegator.DelegateLogs() + + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(fakes.FakeError)) + }) + + It("delegates chunked logs to transformers", func() { + fakeTransformer := &mocks.MockEventTransformer{} + fakeTransformer.SetTransformerConfig(mocks.FakeTransformerConfig) + fakeGethLog := types.Log{ + Address: common.HexToAddress(fakeTransformer.GetConfig().ContractAddresses[0]), + Topics: []common.Hash{common.HexToHash(fakeTransformer.GetConfig().Topic)}, + } + fakeHeaderSyncLog := core.HeaderSyncLog{Log: fakeGethLog} + fakeHeaderSyncLogs := []core.HeaderSyncLog{fakeHeaderSyncLog} + mockLogRepository := 
&fakes.MockHeaderSyncLogRepository{} + mockLogRepository.ReturnLogs = fakeHeaderSyncLogs + delegator := newDelegator(mockLogRepository) + delegator.AddTransformer(fakeTransformer) + + err := delegator.DelegateLogs() + + Expect(err).NotTo(HaveOccurred()) + Expect(fakeTransformer.ExecuteWasCalled).To(BeTrue()) + Expect(fakeTransformer.PassedLogs).To(Equal(fakeHeaderSyncLogs)) + }) + + It("returns an error if transformer returns an error", func() { + delegator := newDelegator(&fakes.MockHeaderSyncLogRepository{}) + fakeTransformer := &mocks.MockEventTransformer{ExecuteError: fakes.FakeError} + delegator.AddTransformer(fakeTransformer) + + err := delegator.DelegateLogs() + + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(fakes.FakeError)) + }) + }) + +}) + +func newDelegator(headerSyncLogRepository *fakes.MockHeaderSyncLogRepository) logs.LogDelegator { + return logs.LogDelegator{ + Chunker: chunker.NewLogChunker(), + LogRepository: headerSyncLogRepository, + } +} diff --git a/libraries/shared/logs/extractor.go b/libraries/shared/logs/extractor.go new file mode 100644 index 00000000..fccce1b5 --- /dev/null +++ b/libraries/shared/logs/extractor.go @@ -0,0 +1,122 @@ +// VulcanizeDB +// Copyright © 2019 Vulcanize + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. + +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . 
+ +package logs + +import ( + "errors" + "github.com/ethereum/go-ethereum/common" + "github.com/sirupsen/logrus" + "github.com/vulcanize/vulcanizedb/libraries/shared/constants" + "github.com/vulcanize/vulcanizedb/libraries/shared/fetcher" + "github.com/vulcanize/vulcanizedb/libraries/shared/transactions" + "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" + "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore" +) + +var ErrNoWatchedAddresses = errors.New("no watched addresses configured in the log extractor") + +type ILogExtractor interface { + AddTransformerConfig(config transformer.EventTransformerConfig) + ExtractLogs(recheckHeaders constants.TransformerExecution) error +} + +type LogExtractor struct { + Addresses []common.Address + CheckedHeadersRepository datastore.CheckedHeadersRepository + Fetcher fetcher.ILogFetcher + LogRepository datastore.HeaderSyncLogRepository + StartingBlock *int64 + Syncer transactions.ITransactionsSyncer + Topics []common.Hash +} + +// Add additional logs to extract +func (extractor *LogExtractor) AddTransformerConfig(config transformer.EventTransformerConfig) { + if extractor.StartingBlock == nil { + extractor.StartingBlock = &config.StartingBlockNumber + } else if earlierStartingBlockNumber(config.StartingBlockNumber, *extractor.StartingBlock) { + extractor.StartingBlock = &config.StartingBlockNumber + } + + addresses := transformer.HexStringsToAddresses(config.ContractAddresses) + extractor.Addresses = append(extractor.Addresses, addresses...) 
+ extractor.Topics = append(extractor.Topics, common.HexToHash(config.Topic)) +} + +// Fetch and persist watched logs +func (extractor LogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution) error { + if len(extractor.Addresses) < 1 { + logrus.Errorf("error extracting logs: %s", ErrNoWatchedAddresses.Error()) + return ErrNoWatchedAddresses + } + + missingHeaders, missingHeadersErr := extractor.CheckedHeadersRepository.MissingHeaders(*extractor.StartingBlock, -1, getCheckCount(recheckHeaders)) + if missingHeadersErr != nil { + logrus.Errorf("error fetching missing headers: %s", missingHeadersErr) + return missingHeadersErr + } + + for _, header := range missingHeaders { + logs, fetchLogsErr := extractor.Fetcher.FetchLogs(extractor.Addresses, extractor.Topics, header) + if fetchLogsErr != nil { + logError("error fetching logs for header: %s", fetchLogsErr, header) + return fetchLogsErr + } + + if len(logs) > 0 { + transactionsSyncErr := extractor.Syncer.SyncTransactions(header.Id, logs) + if transactionsSyncErr != nil { + logError("error syncing transactions: %s", transactionsSyncErr, header) + return transactionsSyncErr + } + + createLogsErr := extractor.LogRepository.CreateHeaderSyncLogs(header.Id, logs) + if createLogsErr != nil { + logError("error persisting logs: %s", createLogsErr, header) + return createLogsErr + } + } + + markHeaderCheckedErr := extractor.CheckedHeadersRepository.MarkHeaderChecked(header.Id) + if markHeaderCheckedErr != nil { + logError("error marking header checked: %s", markHeaderCheckedErr, header) + return markHeaderCheckedErr + } + } + return nil +} + +func earlierStartingBlockNumber(transformerBlock, watcherBlock int64) bool { + return transformerBlock < watcherBlock +} + +func logError(description string, err error, header core.Header) { + logrus.WithFields(logrus.Fields{ + "headerId": header.Id, + "headerHash": header.Hash, + "blockNumber": header.BlockNumber, + }).Errorf(description, err.Error()) +} + +func 
getCheckCount(recheckHeaders constants.TransformerExecution) int64 { + if recheckHeaders == constants.HeaderMissing { + return 1 + } else { + return constants.RecheckHeaderCap + } +} diff --git a/libraries/shared/logs/extractor_test.go b/libraries/shared/logs/extractor_test.go new file mode 100644 index 00000000..151086be --- /dev/null +++ b/libraries/shared/logs/extractor_test.go @@ -0,0 +1,310 @@ +// VulcanizeDB +// Copyright © 2019 Vulcanize + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. + +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . + +package logs_test + +import ( + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/types" + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" + "github.com/vulcanize/vulcanizedb/libraries/shared/constants" + "github.com/vulcanize/vulcanizedb/libraries/shared/logs" + "github.com/vulcanize/vulcanizedb/libraries/shared/mocks" + "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" + "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/fakes" + "math/rand" +) + +var _ = Describe("Log extractor", func() { + var extractor *logs.LogExtractor + + BeforeEach(func() { + extractor = &logs.LogExtractor{ + Fetcher: &mocks.MockLogFetcher{}, + CheckedHeadersRepository: &fakes.MockCheckedHeadersRepository{}, + LogRepository: &fakes.MockHeaderSyncLogRepository{}, + Syncer: &fakes.MockTransactionSyncer{}, + } + }) + + Describe("AddTransformerConfig", func() { + It("it includes earliest starting block number in fetch logs query", func() { + earlierStartingBlockNumber := rand.Int63() + laterStartingBlockNumber := earlierStartingBlockNumber + 1 + + extractor.AddTransformerConfig(getTransformerConfig(laterStartingBlockNumber)) + extractor.AddTransformerConfig(getTransformerConfig(earlierStartingBlockNumber)) + + Expect(*extractor.StartingBlock).To(Equal(earlierStartingBlockNumber)) + }) + + It("includes added addresses in fetch logs query", func() { + addresses := []string{"0xA", "0xB"} + configWithAddresses := transformer.EventTransformerConfig{ + ContractAddresses: addresses, + StartingBlockNumber: rand.Int63(), + } + + extractor.AddTransformerConfig(configWithAddresses) + + expectedAddresses := transformer.HexStringsToAddresses(addresses) + Expect(extractor.Addresses).To(Equal(expectedAddresses)) + }) + + It("includes added topics in fetch logs query", func() { + topic := "0x1" + configWithTopic := transformer.EventTransformerConfig{ + ContractAddresses: []string{fakes.FakeAddress.Hex()}, + Topic: topic, + StartingBlockNumber: rand.Int63(), + } + + extractor.AddTransformerConfig(configWithTopic) + + 
Expect(extractor.Topics).To(Equal([]common.Hash{common.HexToHash(topic)})) + }) + }) + + Describe("ExtractLogs", func() { + It("returns error if no watched addresses configured", func() { + err := extractor.ExtractLogs(constants.HeaderMissing) + + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(logs.ErrNoWatchedAddresses)) + }) + + Describe("when checking missing headers", func() { + It("gets missing headers since configured starting block with check count < 1", func() { + mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} + mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{}} + extractor.CheckedHeadersRepository = mockCheckedHeadersRepository + startingBlockNumber := rand.Int63() + extractor.AddTransformerConfig(getTransformerConfig(startingBlockNumber)) + + err := extractor.ExtractLogs(constants.HeaderMissing) + + Expect(err).NotTo(HaveOccurred()) + Expect(mockCheckedHeadersRepository.StartingBlockNumber).To(Equal(startingBlockNumber)) + Expect(mockCheckedHeadersRepository.EndingBlockNumber).To(Equal(int64(-1))) + Expect(mockCheckedHeadersRepository.CheckCount).To(Equal(int64(1))) + }) + }) + + Describe("when rechecking headers", func() { + It("gets missing headers since configured starting block with check count < 1", func() { + mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} + mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{}} + extractor.CheckedHeadersRepository = mockCheckedHeadersRepository + startingBlockNumber := rand.Int63() + extractor.AddTransformerConfig(getTransformerConfig(startingBlockNumber)) + + err := extractor.ExtractLogs(constants.HeaderRecheck) + + Expect(err).NotTo(HaveOccurred()) + Expect(mockCheckedHeadersRepository.StartingBlockNumber).To(Equal(startingBlockNumber)) + Expect(mockCheckedHeadersRepository.EndingBlockNumber).To(Equal(int64(-1))) + Expect(mockCheckedHeadersRepository.CheckCount).To(Equal(constants.RecheckHeaderCap)) + }) + }) + + It("returns error if 
getting missing headers fails", func() { + addTransformerConfig(extractor) + mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} + mockCheckedHeadersRepository.MissingHeadersReturnError = fakes.FakeError + extractor.CheckedHeadersRepository = mockCheckedHeadersRepository + + err := extractor.ExtractLogs(constants.HeaderMissing) + + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(fakes.FakeError)) + }) + + It("does not fetch logs if no missing headers", func() { + addTransformerConfig(extractor) + mockLogFetcher := &mocks.MockLogFetcher{} + extractor.Fetcher = mockLogFetcher + + err := extractor.ExtractLogs(constants.HeaderMissing) + + Expect(err).NotTo(HaveOccurred()) + Expect(mockLogFetcher.FetchCalled).To(BeFalse()) + }) + + It("fetches logs for missing headers", func() { + addMissingHeader(extractor) + config := transformer.EventTransformerConfig{ + ContractAddresses: []string{fakes.FakeAddress.Hex()}, + Topic: fakes.FakeHash.Hex(), + StartingBlockNumber: rand.Int63(), + } + extractor.AddTransformerConfig(config) + mockLogFetcher := &mocks.MockLogFetcher{} + extractor.Fetcher = mockLogFetcher + + err := extractor.ExtractLogs(constants.HeaderMissing) + + Expect(err).NotTo(HaveOccurred()) + Expect(mockLogFetcher.FetchCalled).To(BeTrue()) + Expect(mockLogFetcher.Topics).To(Equal([]common.Hash{common.HexToHash(config.Topic)})) + Expect(mockLogFetcher.ContractAddresses).To(Equal(transformer.HexStringsToAddresses(config.ContractAddresses))) + }) + + It("returns error if fetching logs fails", func() { + addMissingHeader(extractor) + addTransformerConfig(extractor) + mockLogFetcher := &mocks.MockLogFetcher{} + mockLogFetcher.ReturnError = fakes.FakeError + extractor.Fetcher = mockLogFetcher + + err := extractor.ExtractLogs(constants.HeaderMissing) + + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(fakes.FakeError)) + }) + + It("does not sync transactions if no fetched logs", func() { + addMissingHeader(extractor) + 
addTransformerConfig(extractor) + mockTransactionSyncer := &fakes.MockTransactionSyncer{} + extractor.Syncer = mockTransactionSyncer + + err := extractor.ExtractLogs(constants.HeaderMissing) + + Expect(err).NotTo(HaveOccurred()) + Expect(mockTransactionSyncer.SyncTransactionsCalled).To(BeFalse()) + }) + + It("syncs transactions for fetched logs", func() { + addMissingHeader(extractor) + addFetchedLog(extractor) + addTransformerConfig(extractor) + mockTransactionSyncer := &fakes.MockTransactionSyncer{} + extractor.Syncer = mockTransactionSyncer + + err := extractor.ExtractLogs(constants.HeaderMissing) + + Expect(err).NotTo(HaveOccurred()) + Expect(mockTransactionSyncer.SyncTransactionsCalled).To(BeTrue()) + }) + + It("returns error if syncing transactions fails", func() { + addMissingHeader(extractor) + addFetchedLog(extractor) + addTransformerConfig(extractor) + mockTransactionSyncer := &fakes.MockTransactionSyncer{} + mockTransactionSyncer.SyncTransactionsError = fakes.FakeError + extractor.Syncer = mockTransactionSyncer + + err := extractor.ExtractLogs(constants.HeaderMissing) + + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(fakes.FakeError)) + }) + + It("persists fetched logs", func() { + addMissingHeader(extractor) + addTransformerConfig(extractor) + fakeLogs := []types.Log{{ + Address: common.HexToAddress("0xA"), + Topics: []common.Hash{common.HexToHash("0xA")}, + Data: []byte{}, + Index: 0, + }} + mockLogFetcher := &mocks.MockLogFetcher{ReturnLogs: fakeLogs} + extractor.Fetcher = mockLogFetcher + mockLogRepository := &fakes.MockHeaderSyncLogRepository{} + extractor.LogRepository = mockLogRepository + + err := extractor.ExtractLogs(constants.HeaderMissing) + + Expect(err).NotTo(HaveOccurred()) + Expect(mockLogRepository.PassedLogs).To(Equal(fakeLogs)) + }) + + It("returns error if persisting logs fails", func() { + addMissingHeader(extractor) + addFetchedLog(extractor) + addTransformerConfig(extractor) + mockLogRepository := 
&fakes.MockHeaderSyncLogRepository{} + mockLogRepository.CreateError = fakes.FakeError + extractor.LogRepository = mockLogRepository + + err := extractor.ExtractLogs(constants.HeaderMissing) + + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(fakes.FakeError)) + }) + + It("marks header checked", func() { + addFetchedLog(extractor) + addTransformerConfig(extractor) + mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} + headerID := rand.Int63() + mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{Id: headerID}} + extractor.CheckedHeadersRepository = mockCheckedHeadersRepository + + err := extractor.ExtractLogs(constants.HeaderMissing) + + Expect(err).NotTo(HaveOccurred()) + Expect(mockCheckedHeadersRepository.HeaderID).To(Equal(headerID)) + }) + + It("returns error if marking header checked fails", func() { + addFetchedLog(extractor) + addTransformerConfig(extractor) + mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} + mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{Id: rand.Int63()}} + mockCheckedHeadersRepository.MarkHeaderCheckedReturnError = fakes.FakeError + extractor.CheckedHeadersRepository = mockCheckedHeadersRepository + + err := extractor.ExtractLogs(constants.HeaderMissing) + + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(fakes.FakeError)) + }) + }) +}) + +func addTransformerConfig(extractor *logs.LogExtractor) { + fakeConfig := transformer.EventTransformerConfig{ + ContractAddresses: []string{fakes.FakeAddress.Hex()}, + Topic: fakes.FakeHash.Hex(), + StartingBlockNumber: rand.Int63(), + } + extractor.AddTransformerConfig(fakeConfig) +} + +func addMissingHeader(extractor *logs.LogExtractor) { + mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} + mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{}} + extractor.CheckedHeadersRepository = mockCheckedHeadersRepository +} + +func addFetchedLog(extractor *logs.LogExtractor) { + mockLogFetcher := 
&mocks.MockLogFetcher{} + mockLogFetcher.ReturnLogs = []types.Log{{}} + extractor.Fetcher = mockLogFetcher +} + +func getTransformerConfig(startingBlockNumber int64) transformer.EventTransformerConfig { + return transformer.EventTransformerConfig{ + ContractAddresses: []string{fakes.FakeAddress.Hex()}, + Topic: fakes.FakeHash.Hex(), + StartingBlockNumber: startingBlockNumber, + } +} diff --git a/libraries/shared/logs/logs_suite_test.go b/libraries/shared/logs/logs_suite_test.go new file mode 100644 index 00000000..5227bac7 --- /dev/null +++ b/libraries/shared/logs/logs_suite_test.go @@ -0,0 +1,35 @@ +// VulcanizeDB +// Copyright © 2019 Vulcanize + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. + +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . + +package logs_test + +import ( + "github.com/sirupsen/logrus" + "io/ioutil" + "testing" + + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" +) + +func TestLogs(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Logs Suite") +} + +var _ = BeforeSuite(func() { + logrus.SetOutput(ioutil.Discard) +}) diff --git a/libraries/shared/mocks/converter.go b/libraries/shared/mocks/event_converter.go similarity index 100% rename from libraries/shared/mocks/converter.go rename to libraries/shared/mocks/event_converter.go diff --git a/libraries/shared/mocks/repository.go b/libraries/shared/mocks/event_repository.go similarity index 63% rename from libraries/shared/mocks/repository.go rename to libraries/shared/mocks/event_repository.go index 1c3efe13..8c3c1c36 100644 --- a/libraries/shared/mocks/repository.go +++ b/libraries/shared/mocks/event_repository.go @@ -23,76 +23,68 @@ import ( "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" ) -type MockRepository struct { +type MockEventRepository struct { createError error markHeaderCheckedError error MarkHeaderCheckedPassedHeaderIDs []int64 - CreatedHeaderIds []int64 missingHeaders []core.Header allHeaders []core.Header missingHeadersError error PassedStartingBlockNumber int64 PassedEndingBlockNumber int64 - PassedHeaderID int64 PassedModels []interface{} SetDbCalled bool CreateCalledCounter int } -func (repository *MockRepository) Create(headerID int64, models []interface{}) error { - repository.PassedHeaderID = headerID +func (repository *MockEventRepository) Create(models []interface{}) error { repository.PassedModels = models - repository.CreatedHeaderIds = append(repository.CreatedHeaderIds, headerID) repository.CreateCalledCounter++ return repository.createError } -func (repository *MockRepository) MarkHeaderChecked(headerID int64) error { +func (repository *MockEventRepository) MarkHeaderChecked(headerID int64) error { repository.MarkHeaderCheckedPassedHeaderIDs = append(repository.MarkHeaderCheckedPassedHeaderIDs, headerID) return repository.markHeaderCheckedError } -func (repository *MockRepository) 
MissingHeaders(startingBlockNumber, endingBlockNumber int64) ([]core.Header, error) { +func (repository *MockEventRepository) MissingHeaders(startingBlockNumber, endingBlockNumber int64) ([]core.Header, error) { repository.PassedStartingBlockNumber = startingBlockNumber repository.PassedEndingBlockNumber = endingBlockNumber return repository.missingHeaders, repository.missingHeadersError } -func (repository *MockRepository) RecheckHeaders(startingBlockNumber, endingBlockNumber int64) ([]core.Header, error) { +func (repository *MockEventRepository) RecheckHeaders(startingBlockNumber, endingBlockNumber int64) ([]core.Header, error) { repository.PassedStartingBlockNumber = startingBlockNumber repository.PassedEndingBlockNumber = endingBlockNumber return repository.allHeaders, nil } -func (repository *MockRepository) SetDB(db *postgres.DB) { +func (repository *MockEventRepository) SetDB(db *postgres.DB) { repository.SetDbCalled = true } -func (repository *MockRepository) SetMissingHeadersError(e error) { +func (repository *MockEventRepository) SetMissingHeadersError(e error) { repository.missingHeadersError = e } -func (repository *MockRepository) SetAllHeaders(headers []core.Header) { +func (repository *MockEventRepository) SetAllHeaders(headers []core.Header) { repository.allHeaders = headers } -func (repository *MockRepository) SetMissingHeaders(headers []core.Header) { +func (repository *MockEventRepository) SetMissingHeaders(headers []core.Header) { repository.missingHeaders = headers } -func (repository *MockRepository) SetMarkHeaderCheckedError(e error) { +func (repository *MockEventRepository) SetMarkHeaderCheckedError(e error) { repository.markHeaderCheckedError = e } -func (repository *MockRepository) SetCreateError(e error) { +func (repository *MockEventRepository) SetCreateError(e error) { repository.createError = e } -func (repository *MockRepository) AssertMarkHeaderCheckedCalledWith(i int64) { +func (repository *MockEventRepository) 
AssertMarkHeaderCheckedCalledWith(i int64) { Expect(repository.MarkHeaderCheckedPassedHeaderIDs).To(ContainElement(i)) } - -func (repository *MockRepository) AssertMarkHeaderCheckedNotCalled() { - Expect(len(repository.MarkHeaderCheckedPassedHeaderIDs)).To(Equal(0)) -} diff --git a/libraries/shared/mocks/transformer.go b/libraries/shared/mocks/event_transformer.go similarity index 63% rename from libraries/shared/mocks/transformer.go rename to libraries/shared/mocks/event_transformer.go index 034bbe69..9a8c5e08 100644 --- a/libraries/shared/mocks/transformer.go +++ b/libraries/shared/mocks/event_transformer.go @@ -20,40 +20,39 @@ import ( "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" + "github.com/vulcanize/vulcanizedb/pkg/fakes" ) -type MockTransformer struct { +type MockEventTransformer struct { ExecuteWasCalled bool ExecuteError error PassedLogs []core.HeaderSyncLog - PassedHeaderID int64 config transformer.EventTransformerConfig } -func (mh *MockTransformer) Execute(logs []core.HeaderSyncLog, headerID int64) error { - if mh.ExecuteError != nil { - return mh.ExecuteError +func (t *MockEventTransformer) Execute(logs []core.HeaderSyncLog) error { + if t.ExecuteError != nil { + return t.ExecuteError } - mh.ExecuteWasCalled = true - mh.PassedLogs = logs - mh.PassedHeaderID = headerID + t.ExecuteWasCalled = true + t.PassedLogs = logs return nil } -func (mh *MockTransformer) GetConfig() transformer.EventTransformerConfig { - return mh.config +func (t *MockEventTransformer) GetConfig() transformer.EventTransformerConfig { + return t.config } -func (mh *MockTransformer) SetTransformerConfig(config transformer.EventTransformerConfig) { - mh.config = config +func (t *MockEventTransformer) SetTransformerConfig(config transformer.EventTransformerConfig) { + t.config = config } -func (mh *MockTransformer) FakeTransformerInitializer(db *postgres.DB) 
transformer.EventTransformer { - return mh +func (t *MockEventTransformer) FakeTransformerInitializer(db *postgres.DB) transformer.EventTransformer { + return t } var FakeTransformerConfig = transformer.EventTransformerConfig{ TransformerName: "FakeTransformer", - ContractAddresses: []string{"FakeAddress"}, - Topic: "FakeTopic", + ContractAddresses: []string{fakes.FakeAddress.Hex()}, + Topic: fakes.FakeHash.Hex(), } diff --git a/libraries/shared/mocks/log_delegator.go b/libraries/shared/mocks/log_delegator.go new file mode 100644 index 00000000..95ad5fe8 --- /dev/null +++ b/libraries/shared/mocks/log_delegator.go @@ -0,0 +1,36 @@ +// VulcanizeDB +// Copyright © 2019 Vulcanize + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. + +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . 
+ +package mocks + +import ( + "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" +) + +type MockLogDelegator struct { + AddedTransformers []transformer.EventTransformer + DelegateCalled bool + DelegateError error +} + +func (delegator *MockLogDelegator) AddTransformer(t transformer.EventTransformer) { + delegator.AddedTransformers = append(delegator.AddedTransformers, t) +} + +func (delegator *MockLogDelegator) DelegateLogs() error { + delegator.DelegateCalled = true + return delegator.DelegateError +} diff --git a/libraries/shared/mocks/log_extractor.go b/libraries/shared/mocks/log_extractor.go new file mode 100644 index 00000000..b179b1ea --- /dev/null +++ b/libraries/shared/mocks/log_extractor.go @@ -0,0 +1,37 @@ +// VulcanizeDB +// Copyright © 2019 Vulcanize + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. + +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . 
+ +package mocks + +import ( + "github.com/vulcanize/vulcanizedb/libraries/shared/constants" + "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" +) + +type MockLogExtractor struct { + AddedConfigs []transformer.EventTransformerConfig + ExtractLogsCalled bool + ExtractLogsError error +} + +func (extractor *MockLogExtractor) AddTransformerConfig(config transformer.EventTransformerConfig) { + extractor.AddedConfigs = append(extractor.AddedConfigs, config) +} + +func (extractor *MockLogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution) error { + extractor.ExtractLogsCalled = true + return extractor.ExtractLogsError +} diff --git a/libraries/shared/mocks/log_note_converter.go b/libraries/shared/mocks/log_fetcher.go similarity index 55% rename from libraries/shared/mocks/log_note_converter.go rename to libraries/shared/mocks/log_fetcher.go index 7125fc5e..049a1fbf 100644 --- a/libraries/shared/mocks/log_note_converter.go +++ b/libraries/shared/mocks/log_fetcher.go @@ -17,26 +17,24 @@ package mocks import ( + "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core/types" + "github.com/vulcanize/vulcanizedb/pkg/core" ) -type MockLogNoteConverter struct { - err error - returnModels []interface{} - PassedLogs []types.Log - ToModelsCalledCounter int +type MockLogFetcher struct { + ContractAddresses []common.Address + FetchCalled bool + MissingHeader core.Header + ReturnError error + ReturnLogs []types.Log + Topics []common.Hash } -func (converter *MockLogNoteConverter) ToModels(ethLogs []types.Log) ([]interface{}, error) { - converter.PassedLogs = ethLogs - converter.ToModelsCalledCounter++ - return converter.returnModels, converter.err -} - -func (converter *MockLogNoteConverter) SetConverterError(e error) { - converter.err = e -} - -func (converter *MockLogNoteConverter) SetReturnModels(models []interface{}) { - converter.returnModels = models +func (fetcher *MockLogFetcher) FetchLogs(contractAddresses 
[]common.Address, topics []common.Hash, missingHeader core.Header) ([]types.Log, error) { + fetcher.FetchCalled = true + fetcher.ContractAddresses = contractAddresses + fetcher.Topics = topics + fetcher.MissingHeader = missingHeader + return fetcher.ReturnLogs, fetcher.ReturnError } diff --git a/libraries/shared/mocks/mappings.go b/libraries/shared/mocks/storage_mappings.go similarity index 100% rename from libraries/shared/mocks/mappings.go rename to libraries/shared/mocks/storage_mappings.go diff --git a/libraries/shared/repository/repository.go b/libraries/shared/repository/repository.go index 8aea8c49..155e27ae 100644 --- a/libraries/shared/repository/repository.go +++ b/libraries/shared/repository/repository.go @@ -16,192 +16,12 @@ package repository -import ( - "bytes" - "database/sql" - "database/sql/driver" - "fmt" - "github.com/ethereum/go-ethereum/core/types" - "github.com/jmoiron/sqlx" - "github.com/lib/pq" - "github.com/sirupsen/logrus" +import "github.com/jmoiron/sqlx" - "github.com/vulcanize/vulcanizedb/libraries/shared/constants" - "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" -) - -const insertHeaderSyncLogQuery = `INSERT INTO header_sync_logs - (header_id, address, topics, data, block_number, block_hash, tx_index, tx_hash, log_index, raw) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) ON CONFLICT DO NOTHING RETURNING id` - -func MarkHeaderChecked(headerID int64, db *postgres.DB, checkedHeadersColumn string) error { - _, err := db.Exec(`INSERT INTO public.checked_headers (header_id, `+checkedHeadersColumn+`) - VALUES ($1, $2) - ON CONFLICT (header_id) DO - UPDATE SET `+checkedHeadersColumn+` = checked_headers.`+checkedHeadersColumn+` + 1`, headerID, 1) - return err -} - -func MarkHeaderCheckedInTransaction(headerID int64, tx *sqlx.Tx, checkedHeadersColumn string) error { +func MarkContractWatcherHeaderCheckedInTransaction(headerID int64, tx *sqlx.Tx, checkedHeadersColumn string) error { 
_, err := tx.Exec(`INSERT INTO public.checked_headers (header_id, `+checkedHeadersColumn+`) VALUES ($1, $2) ON CONFLICT (header_id) DO - UPDATE SET `+checkedHeadersColumn+` = checked_headers.`+checkedHeadersColumn+` + 1`, headerID, 1) + UPDATE SET `+checkedHeadersColumn+` = checked_headers.`+checkedHeadersColumn+` + 1`, headerID, 1) return err } - -// Treats a header as missing if it's not in the headers table, or not checked for some log type -func MissingHeaders(startingBlockNumber, endingBlockNumber int64, db *postgres.DB, notCheckedSQL string) ([]core.Header, error) { - var result []core.Header - var query string - var err error - - if endingBlockNumber == -1 { - query = `SELECT headers.id, headers.block_number, headers.hash FROM headers - LEFT JOIN checked_headers on headers.id = header_id - WHERE (header_id ISNULL OR ` + notCheckedSQL + `) - AND headers.block_number >= $1 - AND headers.eth_node_fingerprint = $2` - err = db.Select(&result, query, startingBlockNumber, db.Node.ID) - } else { - query = `SELECT headers.id, headers.block_number, headers.hash FROM headers - LEFT JOIN checked_headers on headers.id = header_id - WHERE (header_id ISNULL OR ` + notCheckedSQL + `) - AND headers.block_number >= $1 - AND headers.block_number <= $2 - AND headers.eth_node_fingerprint = $3` - err = db.Select(&result, query, startingBlockNumber, endingBlockNumber, db.Node.ID) - } - - return result, err -} - -func GetCheckedColumnNames(db *postgres.DB) ([]string, error) { - // Query returns `[]driver.Value`, nullable polymorphic interface - var queryResult []driver.Value - columnNamesQuery := - `SELECT column_name FROM information_schema.columns - WHERE table_schema = 'public' - AND table_name = 'checked_headers' - AND column_name <> 'id' - AND column_name <> 'header_id';` - - err := db.Select(&queryResult, columnNamesQuery) - if err != nil { - return []string{}, err - } - - // Transform column names from `driver.Value` to strings - var columnNames []string - for _, result := 
range queryResult { - if columnName, ok := result.(string); ok { - columnNames = append(columnNames, columnName) - } else { - return []string{}, fmt.Errorf("incorrect value for checked_headers column name") - } - } - - return columnNames, nil -} - -// Builds a SQL string that checks if any column should be checked/rechecked. -// Defaults to FALSE when no columns are provided. -// Ex: ["columnA", "columnB"] => "NOT (columnA!=0 AND columnB!=0)" -// [] => "FALSE" -func CreateHeaderCheckedPredicateSQL(boolColumns []string, recheckHeaders constants.TransformerExecution) string { - if len(boolColumns) == 0 { - return "FALSE" - } - - if recheckHeaders { - return createHeaderCheckedPredicateSQLForRecheckedHeaders(boolColumns) - } else { - return createHeaderCheckedPredicateSQLForMissingHeaders(boolColumns) - } -} - -func CreateLogs(headerID int64, logs []types.Log, db *postgres.DB) ([]core.HeaderSyncLog, error) { - tx, txErr := db.Beginx() - if txErr != nil { - return nil, txErr - } - var results []core.HeaderSyncLog - for _, log := range logs { - logID, err := insertLog(headerID, log, tx) - if err != nil { - if logWasADuplicate(err) { - continue - } - rollbackErr := tx.Rollback() - if rollbackErr != nil { - logrus.Errorf("failed to rollback header sync log insert: %s", rollbackErr.Error()) - } - return nil, err - } - results = append(results, buildLog(logID, headerID, log)) - } - return results, tx.Commit() -} - -func logWasADuplicate(err error) bool { - return err == sql.ErrNoRows -} - -func insertLog(headerID int64, log types.Log, tx *sqlx.Tx) (int64, error) { - topics := buildTopics(log) - raw, jsonErr := log.MarshalJSON() - if jsonErr != nil { - return 0, jsonErr - } - var logID int64 - err := tx.QueryRowx(insertHeaderSyncLogQuery, headerID, log.Address.Hex(), topics, log.Data, log.BlockNumber, - log.BlockHash.Hex(), log.TxIndex, log.TxHash.Hex(), log.Index, raw).Scan(&logID) - return logID, err -} - -func buildLog(logID int64, headerID int64, log types.Log) 
core.HeaderSyncLog { - return core.HeaderSyncLog{ - ID: logID, - HeaderID: headerID, - Log: log, - Transformed: false, - } -} - -func buildTopics(log types.Log) pq.ByteaArray { - var topics pq.ByteaArray - for _, topic := range log.Topics { - topics = append(topics, topic.Bytes()) - } - return topics -} - -func createHeaderCheckedPredicateSQLForMissingHeaders(boolColumns []string) string { - var result bytes.Buffer - result.WriteString(" (") - - // Loop excluding last column name - for _, column := range boolColumns[:len(boolColumns)-1] { - result.WriteString(fmt.Sprintf("%v=0 OR ", column)) - } - - result.WriteString(fmt.Sprintf("%v=0)", boolColumns[len(boolColumns)-1])) - - return result.String() -} - -func createHeaderCheckedPredicateSQLForRecheckedHeaders(boolColumns []string) string { - var result bytes.Buffer - result.WriteString(" (") - - // Loop excluding last column name - for _, column := range boolColumns[:len(boolColumns)-1] { - result.WriteString(fmt.Sprintf("%v<%s OR ", column, constants.RecheckHeaderCap)) - } - - // No trailing "OR" for the last column name - result.WriteString(fmt.Sprintf("%v<%s)", boolColumns[len(boolColumns)-1], constants.RecheckHeaderCap)) - - return result.String() -} diff --git a/libraries/shared/repository/repository_test.go b/libraries/shared/repository/repository_test.go index 9eb33464..72a613af 100644 --- a/libraries/shared/repository/repository_test.go +++ b/libraries/shared/repository/repository_test.go @@ -17,39 +17,25 @@ package repository_test import ( - "fmt" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - "github.com/lib/pq" - "github.com/vulcanize/vulcanizedb/libraries/shared/test_data" - "math/rand" - "strconv" - . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/libraries/shared/constants" - shared "github.com/vulcanize/vulcanizedb/libraries/shared/repository" - r2 "github.com/vulcanize/vulcanizedb/pkg/contract_watcher/header/repository" - "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/datastore" + "github.com/vulcanize/vulcanizedb/libraries/shared/repository" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories" "github.com/vulcanize/vulcanizedb/pkg/fakes" "github.com/vulcanize/vulcanizedb/test_config" ) -var _ = Describe("Repository", func() { - var ( - checkedHeadersColumn string - db *postgres.DB - ) +var _ = Describe("", func() { + Describe("MarkContractWatcherHeaderCheckedInTransaction", func() { + var ( + checkedHeadersColumn string + db *postgres.DB + ) - Describe("MarkHeaderChecked", func() { BeforeEach(func() { db = test_config.NewTestDB(test_config.NewTestNode()) test_config.CleanTestDB(db) - checkedHeadersColumn = "test_column_checked" _, migrateErr := db.Exec(`ALTER TABLE public.checked_headers ADD COLUMN ` + checkedHeadersColumn + ` integer`) @@ -61,395 +47,21 @@ var _ = Describe("Repository", func() { Expect(cleanupMigrateErr).NotTo(HaveOccurred()) }) - It("marks passed column as checked for passed header", func() { - headerRepository := repositories.NewHeaderRepository(db) - headerID, headerErr := headerRepository.CreateOrUpdateHeader(fakes.FakeHeader) - Expect(headerErr).NotTo(HaveOccurred()) - - err := shared.MarkHeaderChecked(headerID, db, checkedHeadersColumn) - - Expect(err).NotTo(HaveOccurred()) - var checkedCount int - fetchErr := db.Get(&checkedCount, `SELECT `+checkedHeadersColumn+` FROM public.checked_headers LIMIT 1`) - Expect(fetchErr).NotTo(HaveOccurred()) - Expect(checkedCount).To(Equal(1)) - }) - }) - - Describe("MarkHeaderCheckedInTransaction", func() { - BeforeEach(func() { - db = 
test_config.NewTestDB(test_config.NewTestNode()) - test_config.CleanTestDB(db) - - checkedHeadersColumn = "test_column_checked" - _, migrateErr := db.Exec(`ALTER TABLE public.checked_headers - ADD COLUMN ` + checkedHeadersColumn + ` integer`) - Expect(migrateErr).NotTo(HaveOccurred()) - }) - - AfterEach(func() { - _, cleanupMigrateErr := db.Exec(`ALTER TABLE public.checked_headers DROP COLUMN ` + checkedHeadersColumn) - Expect(cleanupMigrateErr).NotTo(HaveOccurred()) - }) - - It("marks passed column as checked for passed header within a passed transaction", func() { + It("marks passed header as checked within a passed transaction", func() { headerRepository := repositories.NewHeaderRepository(db) headerID, headerErr := headerRepository.CreateOrUpdateHeader(fakes.FakeHeader) Expect(headerErr).NotTo(HaveOccurred()) tx, txErr := db.Beginx() Expect(txErr).NotTo(HaveOccurred()) - err := shared.MarkHeaderCheckedInTransaction(headerID, tx, checkedHeadersColumn) - + err := repository.MarkContractWatcherHeaderCheckedInTransaction(headerID, tx, checkedHeadersColumn) Expect(err).NotTo(HaveOccurred()) commitErr := tx.Commit() Expect(commitErr).NotTo(HaveOccurred()) var checkedCount int - fetchErr := db.Get(&checkedCount, `SELECT `+checkedHeadersColumn+` FROM public.checked_headers LIMIT 1`) + fetchErr := db.Get(&checkedCount, `SELECT COUNT(*) FROM public.checked_headers WHERE header_id = $1`, headerID) Expect(fetchErr).NotTo(HaveOccurred()) Expect(checkedCount).To(Equal(1)) }) }) - - Describe("MissingHeaders", func() { - var ( - headerRepository datastore.HeaderRepository - startingBlockNumber int64 - endingBlockNumber int64 - eventSpecificBlockNumber int64 - outOfRangeBlockNumber int64 - blockNumbers []int64 - headerIDs []int64 - notCheckedSQL string - err error - hr r2.HeaderRepository - columnNames []string - ) - - BeforeEach(func() { - db = test_config.NewTestDB(test_config.NewTestNode()) - test_config.CleanTestDB(db) - headerRepository = 
repositories.NewHeaderRepository(db) - hr = r2.NewHeaderRepository(db) - hr.AddCheckColumns(getExpectedColumnNames()) - - columnNames, err = shared.GetCheckedColumnNames(db) - Expect(err).NotTo(HaveOccurred()) - notCheckedSQL = shared.CreateHeaderCheckedPredicateSQL(columnNames, constants.HeaderMissing) - - startingBlockNumber = rand.Int63() - eventSpecificBlockNumber = startingBlockNumber + 1 - endingBlockNumber = startingBlockNumber + 2 - outOfRangeBlockNumber = endingBlockNumber + 1 - - blockNumbers = []int64{startingBlockNumber, eventSpecificBlockNumber, endingBlockNumber, outOfRangeBlockNumber} - - headerIDs = []int64{} - for _, n := range blockNumbers { - headerID, err := headerRepository.CreateOrUpdateHeader(fakes.GetFakeHeader(n)) - headerIDs = append(headerIDs, headerID) - Expect(err).NotTo(HaveOccurred()) - } - }) - - AfterEach(func() { - test_config.CleanCheckedHeadersTable(db, getExpectedColumnNames()) - }) - - It("only treats headers as checked if the event specific logs have been checked", func() { - //add a checked_header record, but don't mark it check for any of the columns - _, err = db.Exec(`INSERT INTO public.checked_headers (header_id) VALUES ($1)`, headerIDs[1]) - Expect(err).NotTo(HaveOccurred()) - - headers, err := shared.MissingHeaders(startingBlockNumber, endingBlockNumber, db, notCheckedSQL) - - Expect(err).NotTo(HaveOccurred()) - Expect(len(headers)).To(Equal(3)) - Expect(headers[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(eventSpecificBlockNumber))) - Expect(headers[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(eventSpecificBlockNumber))) - Expect(headers[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(eventSpecificBlockNumber))) - }) - - It("only returns headers associated with the current node", func() { - dbTwo := test_config.NewTestDB(core.Node{ID: "second"}) - headerRepositoryTwo := repositories.NewHeaderRepository(dbTwo) - 
for _, n := range blockNumbers { - _, err = headerRepositoryTwo.CreateOrUpdateHeader(fakes.GetFakeHeader(n + 10)) - Expect(err).NotTo(HaveOccurred()) - } - - Expect(err).NotTo(HaveOccurred()) - nodeOneMissingHeaders, err := shared.MissingHeaders(startingBlockNumber, endingBlockNumber, db, notCheckedSQL) - Expect(err).NotTo(HaveOccurred()) - Expect(len(nodeOneMissingHeaders)).To(Equal(3)) - Expect(nodeOneMissingHeaders[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(eventSpecificBlockNumber), Equal(endingBlockNumber))) - Expect(nodeOneMissingHeaders[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(eventSpecificBlockNumber), Equal(endingBlockNumber))) - Expect(nodeOneMissingHeaders[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(startingBlockNumber), Equal(eventSpecificBlockNumber), Equal(endingBlockNumber))) - - nodeTwoMissingHeaders, err := shared.MissingHeaders(startingBlockNumber, endingBlockNumber+10, dbTwo, notCheckedSQL) - Expect(err).NotTo(HaveOccurred()) - Expect(len(nodeTwoMissingHeaders)).To(Equal(3)) - Expect(nodeTwoMissingHeaders[0].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(eventSpecificBlockNumber+10), Equal(endingBlockNumber+10))) - Expect(nodeTwoMissingHeaders[1].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(eventSpecificBlockNumber+10), Equal(endingBlockNumber+10))) - }) - - It("handles an ending block of -1 ", func() { - endingBlock := int64(-1) - headers, err := shared.MissingHeaders(startingBlockNumber, endingBlock, db, notCheckedSQL) - - Expect(err).NotTo(HaveOccurred()) - Expect(len(headers)).To(Equal(4)) - Expect(headers[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(eventSpecificBlockNumber), Equal(outOfRangeBlockNumber))) - Expect(headers[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(eventSpecificBlockNumber), Equal(outOfRangeBlockNumber))) - Expect(headers[2].BlockNumber).To(Or(Equal(startingBlockNumber), 
Equal(endingBlockNumber), Equal(eventSpecificBlockNumber), Equal(outOfRangeBlockNumber))) - Expect(headers[3].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(eventSpecificBlockNumber), Equal(outOfRangeBlockNumber))) - - }) - - It("when a the `notCheckedSQL` argument allows for rechecks it returns headers where the checked count is less than the maximum", func() { - columnName := columnNames[0] - recheckedSQL := shared.CreateHeaderCheckedPredicateSQL([]string{columnName}, constants.HeaderRecheck) - // mark every header checked at least once - // header 4 is marked the maximum number of times, it it is not longer checked - - maxCheckCount, intConversionErr := strconv.Atoi(constants.RecheckHeaderCap) - Expect(intConversionErr).NotTo(HaveOccurred()) - - markHeaderOneErr := shared.MarkHeaderChecked(headerIDs[0], db, columnName) - Expect(markHeaderOneErr).NotTo(HaveOccurred()) - markHeaderTwoErr := shared.MarkHeaderChecked(headerIDs[1], db, columnName) - Expect(markHeaderTwoErr).NotTo(HaveOccurred()) - markHeaderThreeErr := shared.MarkHeaderChecked(headerIDs[2], db, columnName) - Expect(markHeaderThreeErr).NotTo(HaveOccurred()) - for i := 0; i <= maxCheckCount; i++ { - markHeaderFourErr := shared.MarkHeaderChecked(headerIDs[3], db, columnName) - Expect(markHeaderFourErr).NotTo(HaveOccurred()) - } - - headers, err := shared.MissingHeaders(1, -1, db, recheckedSQL) - - Expect(err).NotTo(HaveOccurred()) - Expect(len(headers)).To(Equal(3)) - Expect(headers[0].Id).To(Or(Equal(headerIDs[0]), Equal(headerIDs[1]), Equal(headerIDs[2]))) - Expect(headers[1].Id).To(Or(Equal(headerIDs[0]), Equal(headerIDs[1]), Equal(headerIDs[2]))) - Expect(headers[2].Id).To(Or(Equal(headerIDs[0]), Equal(headerIDs[1]), Equal(headerIDs[2]))) - }) - }) - - Describe("GetCheckedColumnNames", func() { - It("gets the column names from checked_headers", func() { - db := test_config.NewTestDB(test_config.NewTestNode()) - hr := r2.NewHeaderRepository(db) - 
hr.AddCheckColumns(getExpectedColumnNames()) - test_config.CleanTestDB(db) - expectedColumnNames := getExpectedColumnNames() - actualColumnNames, err := shared.GetCheckedColumnNames(db) - Expect(err).NotTo(HaveOccurred()) - Expect(actualColumnNames).To(Equal(expectedColumnNames)) - test_config.CleanCheckedHeadersTable(db, getExpectedColumnNames()) - }) - }) - - Describe("CreateHeaderCheckedPredicateSQL", func() { - Describe("for headers that haven't been checked for logs", func() { - It("generates a correct SQL string for one column", func() { - columns := []string{"columnA"} - expected := " (columnA=0)" - actual := shared.CreateHeaderCheckedPredicateSQL(columns, constants.HeaderMissing) - Expect(actual).To(Equal(expected)) - }) - - It("generates a correct SQL string for several columns", func() { - columns := []string{"columnA", "columnB"} - expected := " (columnA=0 OR columnB=0)" - actual := shared.CreateHeaderCheckedPredicateSQL(columns, constants.HeaderMissing) - Expect(actual).To(Equal(expected)) - }) - - It("defaults to FALSE when there are no columns", func() { - expected := "FALSE" - actual := shared.CreateHeaderCheckedPredicateSQL([]string{}, constants.HeaderMissing) - Expect(actual).To(Equal(expected)) - }) - }) - - Describe("for headers that are being rechecked for logs", func() { - It("generates a correct SQL string for rechecking headers for one column", func() { - columns := []string{"columnA"} - expected := fmt.Sprintf(" (columnA<%s)", constants.RecheckHeaderCap) - actual := shared.CreateHeaderCheckedPredicateSQL(columns, constants.HeaderRecheck) - Expect(actual).To(Equal(expected)) - }) - - It("generates a correct SQL string for rechecking headers for several columns", func() { - columns := []string{"columnA", "columnB"} - expected := fmt.Sprintf(" (columnA<%s OR columnB<%s)", constants.RecheckHeaderCap, constants.RecheckHeaderCap) - actual := shared.CreateHeaderCheckedPredicateSQL(columns, constants.HeaderRecheck) - 
Expect(actual).To(Equal(expected)) - }) - - It("defaults to FALSE when there are no columns", func() { - expected := "FALSE" - actual := shared.CreateHeaderCheckedPredicateSQL([]string{}, constants.HeaderRecheck) - Expect(actual).To(Equal(expected)) - }) - }) - }) - - Describe("CreateHeaderSyncLogs", func() { - var headerID int64 - - type HeaderSyncLog struct { - ID int64 - HeaderID int64 `db:"header_id"` - Address string - Topics pq.ByteaArray - Data []byte - BlockNumber uint64 `db:"block_number"` - BlockHash string `db:"block_hash"` - TxHash string `db:"tx_hash"` - TxIndex uint `db:"tx_index"` - LogIndex uint `db:"log_index"` - Transformed bool - Raw []byte - } - - BeforeEach(func() { - db = test_config.NewTestDB(test_config.NewTestNode()) - test_config.CleanTestDB(db) - headerRepository := repositories.NewHeaderRepository(db) - var headerErr error - headerID, headerErr = headerRepository.CreateOrUpdateHeader(fakes.FakeHeader) - Expect(headerErr).NotTo(HaveOccurred()) - }) - - It("writes a log to the db", func() { - log := test_data.GenericTestLog() - - _, err := shared.CreateLogs(headerID, []types.Log{log}, db) - - Expect(err).NotTo(HaveOccurred()) - var dbLog HeaderSyncLog - lookupErr := db.Get(&dbLog, `SELECT * FROM header_sync_logs`) - Expect(lookupErr).NotTo(HaveOccurred()) - Expect(dbLog.ID).NotTo(BeZero()) - Expect(dbLog.HeaderID).To(Equal(headerID)) - Expect(dbLog.Address).To(Equal(log.Address.Hex())) - Expect(dbLog.Topics[0]).To(Equal(log.Topics[0].Bytes())) - Expect(dbLog.Topics[1]).To(Equal(log.Topics[1].Bytes())) - Expect(dbLog.Data).To(Equal(log.Data)) - Expect(dbLog.BlockNumber).To(Equal(log.BlockNumber)) - Expect(dbLog.BlockHash).To(Equal(log.BlockHash.Hex())) - Expect(dbLog.TxIndex).To(Equal(log.TxIndex)) - Expect(dbLog.TxHash).To(Equal(log.TxHash.Hex())) - Expect(dbLog.LogIndex).To(Equal(log.Index)) - expectedRaw, jsonErr := log.MarshalJSON() - Expect(jsonErr).NotTo(HaveOccurred()) - Expect(dbLog.Raw).To(MatchJSON(expectedRaw)) - 
Expect(dbLog.Transformed).To(BeFalse()) - }) - - It("writes several logs to the db", func() { - log1 := test_data.GenericTestLog() - log2 := test_data.GenericTestLog() - logs := []types.Log{log1, log2} - - _, err := shared.CreateLogs(headerID, logs, db) - - Expect(err).NotTo(HaveOccurred()) - var count int - lookupErr := db.Get(&count, `SELECT COUNT(*) FROM header_sync_logs`) - Expect(lookupErr).NotTo(HaveOccurred()) - Expect(count).To(Equal(len(logs))) - }) - - It("persists record that can be unpacked into types.Log", func() { - // important if we want to decouple log persistence from transforming and still make use of - // tools on types.Log like abi.Unpack - - log := test_data.GenericTestLog() - - _, err := shared.CreateLogs(headerID, []types.Log{log}, db) - - Expect(err).NotTo(HaveOccurred()) - var dbLog HeaderSyncLog - lookupErr := db.Get(&dbLog, `SELECT * FROM header_sync_logs`) - Expect(lookupErr).NotTo(HaveOccurred()) - - var logTopics []common.Hash - for _, topic := range dbLog.Topics { - logTopics = append(logTopics, common.BytesToHash(topic)) - } - - reconstructedLog := types.Log{ - Address: common.HexToAddress(dbLog.Address), - Topics: logTopics, - Data: dbLog.Data, - BlockNumber: dbLog.BlockNumber, - TxHash: common.HexToHash(dbLog.TxHash), - TxIndex: dbLog.TxIndex, - BlockHash: common.HexToHash(dbLog.BlockHash), - Index: dbLog.LogIndex, - Removed: false, - } - Expect(reconstructedLog).To(Equal(log)) - }) - - It("does not duplicate logs", func() { - log := test_data.GenericTestLog() - - results, err := shared.CreateLogs(headerID, []types.Log{log, log}, db) - - Expect(err).NotTo(HaveOccurred()) - Expect(len(results)).To(Equal(1)) - var count int - lookupErr := db.Get(&count, `SELECT COUNT(*) FROM header_sync_logs`) - Expect(lookupErr).NotTo(HaveOccurred()) - Expect(count).To(Equal(1)) - }) - - It("returns results with log id and header id for persisted logs", func() { - log1 := test_data.GenericTestLog() - log2 := test_data.GenericTestLog() - logs := 
[]types.Log{log1, log2} - - results, err := shared.CreateLogs(headerID, logs, db) - - Expect(err).NotTo(HaveOccurred()) - Expect(len(results)).To(Equal(len(logs))) - var log1ID, log2ID int64 - lookupErr := db.Get(&log1ID, `SELECT id FROM header_sync_logs WHERE data = $1`, log1.Data) - Expect(lookupErr).NotTo(HaveOccurred()) - lookup2Err := db.Get(&log2ID, `SELECT id FROM header_sync_logs WHERE data = $1`, log2.Data) - Expect(lookup2Err).NotTo(HaveOccurred()) - Expect(results[0].ID).To(Or(Equal(log1ID), Equal(log2ID))) - Expect(results[1].ID).To(Or(Equal(log1ID), Equal(log2ID))) - Expect(results[0].HeaderID).To(Equal(headerID)) - Expect(results[1].HeaderID).To(Equal(headerID)) - }) - - It("returns results with properties for persisted logs", func() { - log1 := test_data.GenericTestLog() - log2 := test_data.GenericTestLog() - logs := []types.Log{log1, log2} - - results, err := shared.CreateLogs(headerID, logs, db) - - Expect(err).NotTo(HaveOccurred()) - Expect(len(results)).To(Equal(len(logs))) - Expect(results[0].Log).To(Or(Equal(log1), Equal(log2))) - Expect(results[1].Log).To(Or(Equal(log1), Equal(log2))) - Expect(results[0].Transformed).To(BeFalse()) - Expect(results[1].Transformed).To(BeFalse()) - }) - }) }) - -func getExpectedColumnNames() []string { - return []string{ - "column_1", - "column_2", - "column_3", - "column_4", - } -} diff --git a/libraries/shared/test_data/generic.go b/libraries/shared/test_data/generic.go index f00b51a0..c414ab0e 100644 --- a/libraries/shared/test_data/generic.go +++ b/libraries/shared/test_data/generic.go @@ -17,14 +17,12 @@ package test_data import ( + "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" + "github.com/ethereum/go-ethereum/core/types" + "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" "math/rand" "time" - - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - - "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" 
) type GenericModel struct{} diff --git a/libraries/shared/transformer/event_transformer.go b/libraries/shared/transformer/event_transformer.go index 9e888ccf..c3dca5ff 100644 --- a/libraries/shared/transformer/event_transformer.go +++ b/libraries/shared/transformer/event_transformer.go @@ -18,13 +18,12 @@ package transformer import ( "github.com/ethereum/go-ethereum/common" - "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" ) type EventTransformer interface { - Execute(logs []core.HeaderSyncLog, headerID int64) error + Execute(logs []core.HeaderSyncLog) error GetConfig() EventTransformerConfig } diff --git a/libraries/shared/watcher/event_watcher.go b/libraries/shared/watcher/event_watcher.go index 5d8f3308..c682400c 100644 --- a/libraries/shared/watcher/event_watcher.go +++ b/libraries/shared/watcher/event_watcher.go @@ -17,141 +17,67 @@ package watcher import ( - "fmt" - "github.com/vulcanize/vulcanizedb/libraries/shared/transactions" - - "github.com/ethereum/go-ethereum/common" "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/libraries/shared/chunker" "github.com/vulcanize/vulcanizedb/libraries/shared/constants" "github.com/vulcanize/vulcanizedb/libraries/shared/fetcher" - "github.com/vulcanize/vulcanizedb/libraries/shared/repository" + "github.com/vulcanize/vulcanizedb/libraries/shared/logs" + "github.com/vulcanize/vulcanizedb/libraries/shared/transactions" "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories" ) type EventWatcher struct { - Transformers []transformer.EventTransformer - BlockChain core.BlockChain - DB *postgres.DB - Fetcher fetcher.ILogFetcher - Chunker chunker.Chunker - Addresses []common.Address - Topics []common.Hash - StartingBlock *int64 - Syncer transactions.ITransactionsSyncer + 
blockChain core.BlockChain + db *postgres.DB + LogDelegator logs.ILogDelegator + LogExtractor logs.ILogExtractor } func NewEventWatcher(db *postgres.DB, bc core.BlockChain) EventWatcher { - logChunker := chunker.NewLogChunker() - logFetcher := fetcher.NewLogFetcher(bc) - transactionSyncer := transactions.NewTransactionsSyncer(db, bc) + extractor := &logs.LogExtractor{ + Fetcher: fetcher.NewLogFetcher(bc), + CheckedHeadersRepository: repositories.NewCheckedHeadersRepository(db), + LogRepository: repositories.NewHeaderSyncLogRepository(db), + Syncer: transactions.NewTransactionsSyncer(db, bc), + } + logTransformer := &logs.LogDelegator{ + Chunker: chunker.NewLogChunker(), + LogRepository: repositories.NewHeaderSyncLogRepository(db), + } return EventWatcher{ - BlockChain: bc, - DB: db, - Fetcher: logFetcher, - Chunker: logChunker, - Syncer: transactionSyncer, + blockChain: bc, + db: db, + LogExtractor: extractor, + LogDelegator: logTransformer, } } -// Adds transformers to the watcher and updates the chunker, so that it will consider the new transformers. +// Adds transformers to the watcher so that their logs will be extracted and delegated. func (watcher *EventWatcher) AddTransformers(initializers []transformer.EventTransformerInitializer) { - var contractAddresses []common.Address - var topic0s []common.Hash - var configs []transformer.EventTransformerConfig - for _, initializer := range initializers { - t := initializer(watcher.DB) - watcher.Transformers = append(watcher.Transformers, t) + t := initializer(watcher.db) - config := t.GetConfig() - configs = append(configs, config) - - if watcher.StartingBlock == nil { - watcher.StartingBlock = &config.StartingBlockNumber - } else if earlierStartingBlockNumber(config.StartingBlockNumber, *watcher.StartingBlock) { - watcher.StartingBlock = &config.StartingBlockNumber - } - - addresses := transformer.HexStringsToAddresses(config.ContractAddresses) - contractAddresses = append(contractAddresses, addresses...) 
- topic0s = append(topic0s, common.HexToHash(config.Topic)) + watcher.LogDelegator.AddTransformer(t) + watcher.LogExtractor.AddTransformerConfig(t.GetConfig()) } - - watcher.Addresses = append(watcher.Addresses, contractAddresses...) - watcher.Topics = append(watcher.Topics, topic0s...) - watcher.Chunker.AddConfigs(configs) } +// Extracts and delegates watched log events. func (watcher *EventWatcher) Execute(recheckHeaders constants.TransformerExecution) error { - if watcher.Transformers == nil { - return fmt.Errorf("No transformers added to watcher") + extractErr := watcher.LogExtractor.ExtractLogs(recheckHeaders) + if extractErr != nil { + logrus.Errorf("error extracting logs in event watcher: %s", extractErr.Error()) + return extractErr } - checkedColumnNames, err := repository.GetCheckedColumnNames(watcher.DB) - if err != nil { - return err - } - notCheckedSQL := repository.CreateHeaderCheckedPredicateSQL(checkedColumnNames, recheckHeaders) - - missingHeaders, err := repository.MissingHeaders(*watcher.StartingBlock, -1, watcher.DB, notCheckedSQL) - if err != nil { - logrus.Error("Couldn't fetch missing headers in watcher: ", err) - return err + delegateErr := watcher.LogDelegator.DelegateLogs() + if delegateErr != nil { + logrus.Errorf("error delegating logs in event watcher: %s", delegateErr.Error()) + return delegateErr } - for _, header := range missingHeaders { - // TODO Extend FetchLogs for doing several blocks at a time - logs, err := watcher.Fetcher.FetchLogs(watcher.Addresses, watcher.Topics, header) - if err != nil { - logrus.WithFields(logrus.Fields{ - "headerId": header.Id, - "headerHash": header.Hash, - "blockNumber": header.BlockNumber, - }).Errorf("Couldn't fetch logs for header: %v", err) - return err - } - - transactionsSyncErr := watcher.Syncer.SyncTransactions(header.Id, logs) - if transactionsSyncErr != nil { - logrus.Errorf("error syncing transactions: %s", transactionsSyncErr.Error()) - return transactionsSyncErr - } - - persistedLogs, 
createLogsErr := repository.CreateLogs(header.Id, logs, watcher.DB) - if createLogsErr != nil { - logrus.Errorf("error persisting logs: %s", createLogsErr.Error()) - } - - transformErr := watcher.transformLogs(persistedLogs, header.Id) - if transformErr != nil { - logrus.Error("Could not transform logs: ", transformErr) - return transformErr - } - } - return err -} - -func (watcher *EventWatcher) transformLogs(logs []core.HeaderSyncLog, headerID int64) error { - chunkedLogs := watcher.Chunker.ChunkLogs(logs) - - // Can't quit early and mark as checked if there are no logs. If we are running continuousLogSync, - // not all logs we're interested in might have been fetched. - for _, t := range watcher.Transformers { - transformerName := t.GetConfig().TransformerName - logChunk := chunkedLogs[transformerName] - err := t.Execute(logChunk, headerID) - if err != nil { - logrus.Errorf("%v transformer failed to execute in watcher: %v", transformerName, err) - return err - } - } return nil } - -func earlierStartingBlockNumber(transformerBlock, watcherBlock int64) bool { - return transformerBlock < watcherBlock -} diff --git a/libraries/shared/watcher/event_watcher_test.go b/libraries/shared/watcher/event_watcher_test.go index b3e606c1..041aca42 100644 --- a/libraries/shared/watcher/event_watcher_test.go +++ b/libraries/shared/watcher/event_watcher_test.go @@ -17,239 +17,97 @@ package watcher_test import ( - "errors" - "github.com/ethereum/go-ethereum" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/libraries/shared/constants" "github.com/vulcanize/vulcanizedb/libraries/shared/mocks" "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" "github.com/vulcanize/vulcanizedb/libraries/shared/watcher" - "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" - "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories" "github.com/vulcanize/vulcanizedb/pkg/fakes" - "github.com/vulcanize/vulcanizedb/test_config" ) -var _ = Describe("Watcher", func() { - It("initialises correctly", func() { - db := test_config.NewTestDB(core.Node{ID: "testNode"}) - bc := fakes.NewMockBlockChain() +var _ = Describe("Event Watcher", func() { + var ( + delegator *mocks.MockLogDelegator + extractor *mocks.MockLogExtractor + eventWatcher watcher.EventWatcher + ) - w := watcher.NewEventWatcher(db, bc) - - Expect(w.DB).To(Equal(db)) - Expect(w.Fetcher).NotTo(BeNil()) - Expect(w.Chunker).NotTo(BeNil()) + BeforeEach(func() { + delegator = &mocks.MockLogDelegator{} + extractor = &mocks.MockLogExtractor{} + eventWatcher = watcher.EventWatcher{ + LogDelegator: delegator, + LogExtractor: extractor, + } }) - It("adds transformers", func() { - w := watcher.NewEventWatcher(nil, nil) - fakeTransformer := &mocks.MockTransformer{} - fakeTransformer.SetTransformerConfig(mocks.FakeTransformerConfig) - w.AddTransformers([]transformer.EventTransformerInitializer{fakeTransformer.FakeTransformerInitializer}) - - Expect(len(w.Transformers)).To(Equal(1)) - Expect(w.Transformers).To(ConsistOf(fakeTransformer)) - Expect(w.Topics).To(Equal([]common.Hash{common.HexToHash("FakeTopic")})) - Expect(w.Addresses).To(Equal([]common.Address{common.HexToAddress("FakeAddress")})) - }) - - It("adds transformers from multiple sources", func() { - w := watcher.NewEventWatcher(nil, nil) - fakeTransformer1 := &mocks.MockTransformer{} - fakeTransformer1.SetTransformerConfig(mocks.FakeTransformerConfig) 
- - fakeTransformer2 := &mocks.MockTransformer{} - fakeTransformer2.SetTransformerConfig(mocks.FakeTransformerConfig) - - w.AddTransformers([]transformer.EventTransformerInitializer{fakeTransformer1.FakeTransformerInitializer}) - w.AddTransformers([]transformer.EventTransformerInitializer{fakeTransformer2.FakeTransformerInitializer}) - - Expect(len(w.Transformers)).To(Equal(2)) - Expect(w.Topics).To(Equal([]common.Hash{common.HexToHash("FakeTopic"), - common.HexToHash("FakeTopic")})) - Expect(w.Addresses).To(Equal([]common.Address{common.HexToAddress("FakeAddress"), - common.HexToAddress("FakeAddress")})) - }) - - It("calculates earliest starting block number", func() { - fakeTransformer1 := &mocks.MockTransformer{} - fakeTransformer1.SetTransformerConfig(transformer.EventTransformerConfig{StartingBlockNumber: 5}) - - fakeTransformer2 := &mocks.MockTransformer{} - fakeTransformer2.SetTransformerConfig(transformer.EventTransformerConfig{StartingBlockNumber: 3}) - - w := watcher.NewEventWatcher(nil, nil) - w.AddTransformers([]transformer.EventTransformerInitializer{ - fakeTransformer1.FakeTransformerInitializer, - fakeTransformer2.FakeTransformerInitializer, - }) - - Expect(*w.StartingBlock).To(Equal(int64(3))) - }) - - It("returns an error when run without transformers", func() { - w := watcher.NewEventWatcher(nil, nil) - err := w.Execute(constants.HeaderMissing) - Expect(err).To(MatchError("No transformers added to watcher")) - }) - - Describe("with missing headers", func() { + Describe("AddTransformers", func() { var ( - db *postgres.DB - w watcher.EventWatcher - mockBlockChain fakes.MockBlockChain - headerRepository repositories.HeaderRepository + fakeTransformerOne, fakeTransformerTwo *mocks.MockEventTransformer ) BeforeEach(func() { - db = test_config.NewTestDB(test_config.NewTestNode()) - test_config.CleanTestDB(db) - mockBlockChain = fakes.MockBlockChain{} - headerRepository = repositories.NewHeaderRepository(db) - _, err := 
headerRepository.CreateOrUpdateHeader(fakes.FakeHeader) - Expect(err).NotTo(HaveOccurred()) + fakeTransformerOne = &mocks.MockEventTransformer{} + fakeTransformerOne.SetTransformerConfig(mocks.FakeTransformerConfig) + fakeTransformerTwo = &mocks.MockEventTransformer{} + fakeTransformerTwo.SetTransformerConfig(mocks.FakeTransformerConfig) + initializers := []transformer.EventTransformerInitializer{ + fakeTransformerOne.FakeTransformerInitializer, + fakeTransformerTwo.FakeTransformerInitializer, + } - w = watcher.NewEventWatcher(db, &mockBlockChain) + eventWatcher.AddTransformers(initializers) }) - It("syncs transactions for fetched logs", func() { - fakeTransformer := &mocks.MockTransformer{} - w.AddTransformers([]transformer.EventTransformerInitializer{fakeTransformer.FakeTransformerInitializer}) - mockTransactionSyncer := &fakes.MockTransactionSyncer{} - w.Syncer = mockTransactionSyncer - - err := w.Execute(constants.HeaderMissing) - - Expect(err).NotTo(HaveOccurred()) - Expect(mockTransactionSyncer.SyncTransactionsCalled).To(BeTrue()) + It("adds initialized transformer to log delegator", func() { + expectedTransformers := []transformer.EventTransformer{ + fakeTransformerOne, + fakeTransformerTwo, + } + Expect(delegator.AddedTransformers).To(Equal(expectedTransformers)) }) - It("returns error if syncing transactions fails", func() { - fakeTransformer := &mocks.MockTransformer{} - w.AddTransformers([]transformer.EventTransformerInitializer{fakeTransformer.FakeTransformerInitializer}) - mockTransactionSyncer := &fakes.MockTransactionSyncer{} - mockTransactionSyncer.SyncTransactionsError = fakes.FakeError - w.Syncer = mockTransactionSyncer + It("adds transformer config to log extractor", func() { + expectedConfigs := []transformer.EventTransformerConfig{ + mocks.FakeTransformerConfig, + mocks.FakeTransformerConfig, + } + Expect(extractor.AddedConfigs).To(Equal(expectedConfigs)) + }) + }) - err := w.Execute(constants.HeaderMissing) + Describe("Execute", func() { + 
It("extracts watched logs", func() { + err := eventWatcher.Execute(constants.HeaderMissing) + + Expect(err).NotTo(HaveOccurred()) + Expect(extractor.ExtractLogsCalled).To(BeTrue()) + }) + + It("returns error if extracting logs fails", func() { + extractor.ExtractLogsError = fakes.FakeError + + err := eventWatcher.Execute(constants.HeaderMissing) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) }) - It("persists fetched logs", func() { - fakeTransformer := &mocks.MockTransformer{} - transformerConfig := transformer.EventTransformerConfig{TransformerName: "transformerA", - ContractAddresses: []string{"0x000000000000000000000000000000000000000A"}, - Topic: "0xA"} - fakeTransformer.SetTransformerConfig(transformerConfig) - w.AddTransformers([]transformer.EventTransformerInitializer{fakeTransformer.FakeTransformerInitializer}) - log := types.Log{Address: common.HexToAddress("0xA"), - Topics: []common.Hash{common.HexToHash("0xA")}, - Index: 0, - } - mockBlockChain.SetGetEthLogsWithCustomQueryReturnLogs([]types.Log{log}) - - err := w.Execute(constants.HeaderMissing) + It("delegates untransformed logs", func() { + err := eventWatcher.Execute(constants.HeaderMissing) Expect(err).NotTo(HaveOccurred()) - Expect(len(fakeTransformer.PassedLogs)).NotTo(BeZero()) - Expect(fakeTransformer.PassedLogs[0].ID).NotTo(BeZero()) - Expect(fakeTransformer.PassedLogs[0].Log).To(Equal(log)) + Expect(delegator.DelegateCalled).To(BeTrue()) }) - It("executes each transformer", func() { - fakeTransformer := &mocks.MockTransformer{} - w.AddTransformers([]transformer.EventTransformerInitializer{fakeTransformer.FakeTransformerInitializer}) + It("returns error if delegating logs fails", func() { + delegator.DelegateError = fakes.FakeError - err := w.Execute(constants.HeaderMissing) - Expect(err).NotTo(HaveOccurred()) - Expect(fakeTransformer.ExecuteWasCalled).To(BeTrue()) - }) + err := eventWatcher.Execute(constants.HeaderMissing) - It("returns an error if transformer 
returns an error", func() { - fakeTransformer := &mocks.MockTransformer{ExecuteError: errors.New("Something bad happened")} - w.AddTransformers([]transformer.EventTransformerInitializer{fakeTransformer.FakeTransformerInitializer}) - - err := w.Execute(constants.HeaderMissing) Expect(err).To(HaveOccurred()) - Expect(fakeTransformer.ExecuteWasCalled).To(BeFalse()) - }) - - It("passes only relevant logs to each transformer", func() { - transformerA := &mocks.MockTransformer{} - transformerB := &mocks.MockTransformer{} - - configA := transformer.EventTransformerConfig{TransformerName: "transformerA", - ContractAddresses: []string{"0x000000000000000000000000000000000000000A"}, - Topic: "0xA"} - configB := transformer.EventTransformerConfig{TransformerName: "transformerB", - ContractAddresses: []string{"0x000000000000000000000000000000000000000b"}, - Topic: "0xB"} - - transformerA.SetTransformerConfig(configA) - transformerB.SetTransformerConfig(configB) - - logA := types.Log{Address: common.HexToAddress("0xA"), - Topics: []common.Hash{common.HexToHash("0xA")}, - Index: 0, - } - logB := types.Log{Address: common.HexToAddress("0xB"), - Topics: []common.Hash{common.HexToHash("0xB")}, - Index: 1, - } - mockBlockChain.SetGetEthLogsWithCustomQueryReturnLogs([]types.Log{logA, logB}) - - w = watcher.NewEventWatcher(db, &mockBlockChain) - w.AddTransformers([]transformer.EventTransformerInitializer{ - transformerA.FakeTransformerInitializer, transformerB.FakeTransformerInitializer}) - - err := w.Execute(constants.HeaderMissing) - Expect(err).NotTo(HaveOccurred()) - Expect(len(transformerA.PassedLogs)).NotTo(BeZero()) - Expect(transformerA.PassedLogs[0].Log).To(Equal(logA)) - Expect(len(transformerB.PassedLogs)).NotTo(BeZero()) - Expect(transformerB.PassedLogs[0].Log).To(Equal(logB)) - }) - - Describe("uses the LogFetcher correctly:", func() { - var fakeTransformer mocks.MockTransformer - BeforeEach(func() { - fakeTransformer = mocks.MockTransformer{} - }) - - It("fetches logs for 
added transformers", func() { - addresses := []string{"0xA", "0xB"} - topic := "0x1" - fakeTransformer.SetTransformerConfig(transformer.EventTransformerConfig{ - Topic: topic, ContractAddresses: addresses}) - w.AddTransformers([]transformer.EventTransformerInitializer{fakeTransformer.FakeTransformerInitializer}) - - err := w.Execute(constants.HeaderMissing) - Expect(err).NotTo(HaveOccurred()) - - fakeHash := common.HexToHash(fakes.FakeHeader.Hash) - mockBlockChain.AssertGetEthLogsWithCustomQueryCalledWith(ethereum.FilterQuery{ - BlockHash: &fakeHash, - Addresses: transformer.HexStringsToAddresses(addresses), - Topics: [][]common.Hash{{common.HexToHash(topic)}}, - }) - }) - - It("propagates log fetcher errors", func() { - fetcherError := errors.New("FetcherError") - mockBlockChain.SetGetEthLogsWithCustomQueryErr(fetcherError) - - w.AddTransformers([]transformer.EventTransformerInitializer{fakeTransformer.FakeTransformerInitializer}) - err := w.Execute(constants.HeaderMissing) - Expect(err).To(MatchError(fetcherError)) - }) + Expect(err).To(MatchError(fakes.FakeError)) }) }) }) diff --git a/pkg/contract_watcher/full/retriever/block_retriever_test.go b/pkg/contract_watcher/full/retriever/block_retriever_test.go index f0255db0..88b171f8 100644 --- a/pkg/contract_watcher/full/retriever/block_retriever_test.go +++ b/pkg/contract_watcher/full/retriever/block_retriever_test.go @@ -72,7 +72,7 @@ var _ = Describe("Block Retriever", func() { Receipt: core.Receipt{ TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546ae", ContractAddress: constants.TusdContractAddress, - Logs: []core.Log{}, + Logs: []core.FullSyncLog{}, }, TxIndex: 0, Value: "0", @@ -92,7 +92,7 @@ var _ = Describe("Block Retriever", func() { Receipt: core.Receipt{ TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad234hfs", ContractAddress: constants.TusdContractAddress, - Logs: []core.Log{{ + Logs: []core.FullSyncLog{{ BlockNumber: 3, TxHash: 
"0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad234hfs", Address: constants.TusdContractAddress, @@ -136,7 +136,7 @@ var _ = Describe("Block Retriever", func() { Receipt: core.Receipt{ TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546ae", ContractAddress: "", - Logs: []core.Log{{ + Logs: []core.FullSyncLog{{ BlockNumber: 2, TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546ae", Address: constants.DaiContractAddress, @@ -167,7 +167,7 @@ var _ = Describe("Block Retriever", func() { Receipt: core.Receipt{ TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad234hfs", ContractAddress: "", - Logs: []core.Log{{ + Logs: []core.FullSyncLog{{ BlockNumber: 3, TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad234hfs", Address: constants.DaiContractAddress, diff --git a/pkg/contract_watcher/shared/helpers/test_helpers/database.go b/pkg/contract_watcher/shared/helpers/test_helpers/database.go index 66020ea1..3558614e 100644 --- a/pkg/contract_watcher/shared/helpers/test_helpers/database.go +++ b/pkg/contract_watcher/shared/helpers/test_helpers/database.go @@ -39,7 +39,7 @@ import ( type TransferLog struct { Id int64 `db:"id"` - VulvanizeLogId int64 `db:"vulcanize_log_id"` + VulcanizeLogId int64 `db:"vulcanize_log_id"` TokenName string `db:"token_name"` Block int64 `db:"block"` Tx string `db:"tx"` @@ -50,7 +50,7 @@ type TransferLog struct { type NewOwnerLog struct { Id int64 `db:"id"` - VulvanizeLogId int64 `db:"vulcanize_log_id"` + VulcanizeLogId int64 `db:"vulcanize_log_id"` TokenName string `db:"token_name"` Block int64 `db:"block"` Tx string `db:"tx"` @@ -138,13 +138,13 @@ func SetupTusdRepo(vulcanizeLogId *int64, wantedEvents, wantedMethods []string) Expect(err).NotTo(HaveOccurred()) receiptRepository := repositories.FullSyncReceiptRepository{DB: db} - logRepository := repositories.LogRepository{DB: db} + logRepository := repositories.FullSyncLogRepository{DB: db} blockRepository 
:= *repositories.NewBlockRepository(db) blockNumber := rand.Int63() blockId := CreateBlock(blockNumber, blockRepository) - receipts := []core.Receipt{{Logs: []core.Log{{}}}} + receipts := []core.Receipt{{Logs: []core.FullSyncLog{{}}}} err = receiptRepository.CreateReceiptsAndLogs(blockId, receipts) Expect(err).ToNot(HaveOccurred()) @@ -184,13 +184,13 @@ func SetupENSRepo(vulcanizeLogId *int64, wantedEvents, wantedMethods []string) ( Expect(err).NotTo(HaveOccurred()) receiptRepository := repositories.FullSyncReceiptRepository{DB: db} - logRepository := repositories.LogRepository{DB: db} + logRepository := repositories.FullSyncLogRepository{DB: db} blockRepository := *repositories.NewBlockRepository(db) blockNumber := rand.Int63() blockId := CreateBlock(blockNumber, blockRepository) - receipts := []core.Receipt{{Logs: []core.Log{{}}}} + receipts := []core.Receipt{{Logs: []core.FullSyncLog{{}}}} err = receiptRepository.CreateReceiptsAndLogs(blockId, receipts) Expect(err).ToNot(HaveOccurred()) @@ -221,6 +221,7 @@ func SetupENSContract(wantedEvents, wantedMethods []string) *contract.Contract { }.Init() } +// TODO: tear down/setup DB from migrations so this doesn't alter the schema between tests func TearDown(db *postgres.DB) { tx, err := db.Beginx() Expect(err).NotTo(HaveOccurred()) @@ -255,7 +256,10 @@ func TearDown(db *postgres.DB) { _, err = tx.Exec(`DROP TABLE checked_headers`) Expect(err).NotTo(HaveOccurred()) - _, err = tx.Exec(`CREATE TABLE checked_headers (id SERIAL PRIMARY KEY, header_id INTEGER UNIQUE NOT NULL REFERENCES headers (id) ON DELETE CASCADE);`) + _, err = tx.Exec(`CREATE TABLE checked_headers ( + id SERIAL PRIMARY KEY, + header_id INTEGER UNIQUE NOT NULL REFERENCES headers (id) ON DELETE CASCADE, + check_count INTEGER NOT NULL DEFAULT 1);`) Expect(err).NotTo(HaveOccurred()) _, err = tx.Exec(`DROP SCHEMA IF EXISTS full_0x8dd5fbce2f6a956c3022ba3663759011dd51e73e CASCADE`) diff --git a/pkg/contract_watcher/shared/helpers/test_helpers/mocks/entities.go 
b/pkg/contract_watcher/shared/helpers/test_helpers/mocks/entities.go index be6d5aec..c4eff41c 100644 --- a/pkg/contract_watcher/shared/helpers/test_helpers/mocks/entities.go +++ b/pkg/contract_watcher/shared/helpers/test_helpers/mocks/entities.go @@ -41,7 +41,7 @@ var TransferBlock1 = core.Block{ Receipt: core.Receipt{ TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654aaa", ContractAddress: "", - Logs: []core.Log{{ + Logs: []core.FullSyncLog{{ BlockNumber: 6194633, TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654aaa", Address: constants.TusdContractAddress, @@ -71,7 +71,7 @@ var TransferBlock2 = core.Block{ Receipt: core.Receipt{ TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654eee", ContractAddress: "", - Logs: []core.Log{{ + Logs: []core.FullSyncLog{{ BlockNumber: 6194634, TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654eee", Address: constants.TusdContractAddress, @@ -101,7 +101,7 @@ var NewOwnerBlock1 = core.Block{ Receipt: core.Receipt{ TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654bbb", ContractAddress: "", - Logs: []core.Log{{ + Logs: []core.FullSyncLog{{ BlockNumber: 6194635, TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654bbb", Address: constants.EnsContractAddress, @@ -131,7 +131,7 @@ var NewOwnerBlock2 = core.Block{ Receipt: core.Receipt{ TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654lll", ContractAddress: "", - Logs: []core.Log{{ + Logs: []core.FullSyncLog{{ BlockNumber: 6194636, TxHash: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad654lll", Address: constants.EnsContractAddress, diff --git a/pkg/contract_watcher/shared/repository/event_repository.go b/pkg/contract_watcher/shared/repository/event_repository.go index b95cf413..6fc136f8 100644 --- a/pkg/contract_watcher/shared/repository/event_repository.go +++ b/pkg/contract_watcher/shared/repository/event_repository.go @@ 
-140,7 +140,7 @@ func (r *eventRepository) persistHeaderSyncLogs(logs []types.Log, eventInfo type // Mark header as checked for this eventId eventId := strings.ToLower(eventInfo.Name + "_" + contractAddr) - err = repository.MarkHeaderCheckedInTransaction(logs[0].Id, tx, eventId) // This assumes all logs are from same block + err = repository.MarkContractWatcherHeaderCheckedInTransaction(logs[0].Id, tx, eventId) // This assumes all logs are from same block if err != nil { tx.Rollback() return err diff --git a/pkg/contract_watcher/shared/repository/event_repository_test.go b/pkg/contract_watcher/shared/repository/event_repository_test.go index 4b221e61..566246f6 100644 --- a/pkg/contract_watcher/shared/repository/event_repository_test.go +++ b/pkg/contract_watcher/shared/repository/event_repository_test.go @@ -158,7 +158,7 @@ var _ = Describe("Repository", func() { Expect(err).ToNot(HaveOccurred()) expectedLog := test_helpers.TransferLog{ Id: 1, - VulvanizeLogId: vulcanizeLogId, + VulcanizeLogId: vulcanizeLogId, TokenName: "TrueUSD", Block: 5488076, Tx: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546ae", @@ -180,7 +180,7 @@ var _ = Describe("Repository", func() { Expect(err).ToNot(HaveOccurred()) expectedLog := test_helpers.TransferLog{ Id: 1, - VulvanizeLogId: vulcanizeLogId, + VulcanizeLogId: vulcanizeLogId, TokenName: "TrueUSD", Block: 5488076, Tx: "0x135391a0962a63944e5908e6fedfff90fb4be3e3290a21017861099bad6546ae", diff --git a/pkg/core/blockchain.go b/pkg/core/blockchain.go index 68de3cc1..7f87a5b0 100644 --- a/pkg/core/blockchain.go +++ b/pkg/core/blockchain.go @@ -31,7 +31,7 @@ type BlockChain interface { GetEthLogsWithCustomQuery(query ethereum.FilterQuery) ([]types.Log, error) GetHeaderByNumber(blockNumber int64) (Header, error) GetHeadersByNumbers(blockNumbers []int64) ([]Header, error) - GetLogs(contract Contract, startingBlockNumber *big.Int, endingBlockNumber *big.Int) ([]Log, error) + GetFullSyncLogs(contract Contract, 
startingBlockNumber *big.Int, endingBlockNumber *big.Int) ([]FullSyncLog, error) GetTransactions(transactionHashes []common.Hash) ([]TransactionModel, error) LastBlock() (*big.Int, error) Node() Node diff --git a/pkg/core/log.go b/pkg/core/log.go index d1d576d6..7e21ffc3 100644 --- a/pkg/core/log.go +++ b/pkg/core/log.go @@ -18,7 +18,7 @@ package core import "github.com/ethereum/go-ethereum/core/types" -type Log struct { +type FullSyncLog struct { BlockNumber int64 TxHash string Address string diff --git a/pkg/core/receipts.go b/pkg/core/receipts.go index f58ba4f6..cf9ff5d5 100644 --- a/pkg/core/receipts.go +++ b/pkg/core/receipts.go @@ -21,7 +21,7 @@ type Receipt struct { ContractAddress string `db:"contract_address"` CumulativeGasUsed uint64 `db:"cumulative_gas_used"` GasUsed uint64 `db:"gas_used"` - Logs []Log + Logs []FullSyncLog StateRoot string `db:"state_root"` Status int TxHash string `db:"tx_hash"` diff --git a/pkg/datastore/postgres/postgres_test.go b/pkg/datastore/postgres/postgres_test.go index 1bf0e92e..3fff5484 100644 --- a/pkg/datastore/postgres/postgres_test.go +++ b/pkg/datastore/postgres/postgres_test.go @@ -126,16 +126,16 @@ var _ = Describe("Postgres DB", func() { It("does not commit log if log is invalid", func() { //badTxHash violates db tx_hash field length badTxHash := fmt.Sprintf("x %s", strings.Repeat("1", 100)) - badLog := core.Log{ + badLog := core.FullSyncLog{ Address: "x123", BlockNumber: 1, TxHash: badTxHash, } node := core.Node{GenesisBlock: "GENESIS", NetworkID: 1, ID: "x123", ClientName: "geth"} db, _ := postgres.NewDB(test_config.DBConfig, node) - logRepository := repositories.LogRepository{DB: db} + logRepository := repositories.FullSyncLogRepository{DB: db} - err := logRepository.CreateLogs([]core.Log{badLog}, 123) + err := logRepository.CreateLogs([]core.FullSyncLog{badLog}, 123) Expect(err).ToNot(BeNil()) savedBlock, err := logRepository.GetLogs("x123", 1) diff --git a/pkg/datastore/postgres/repositories/block_repository.go 
b/pkg/datastore/postgres/repositories/block_repository.go index 705f2589..c0e9e2d4 100644 --- a/pkg/datastore/postgres/repositories/block_repository.go +++ b/pkg/datastore/postgres/repositories/block_repository.go @@ -19,10 +19,8 @@ package repositories import ( "database/sql" "errors" - "github.com/jmoiron/sqlx" - log "github.com/sirupsen/logrus" - + "github.com/sirupsen/logrus" "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/datastore" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" @@ -81,7 +79,7 @@ func (blockRepository BlockRepository) MissingBlockNumbers(startingBlockNumber i startingBlockNumber, highestBlockNumber, nodeId) if err != nil { - log.Error("MissingBlockNumbers: error getting blocks: ", err) + logrus.Error("MissingBlockNumbers: error getting blocks: ", err) } return numbers } @@ -112,7 +110,7 @@ func (blockRepository BlockRepository) GetBlock(blockNumber int64) (core.Block, case sql.ErrNoRows: return core.Block{}, datastore.ErrBlockDoesNotExist(blockNumber) default: - log.Error("GetBlock: error loading blocks: ", err) + logrus.Error("GetBlock: error loading blocks: ", err) return savedBlock, err } } @@ -151,7 +149,7 @@ func (blockRepository BlockRepository) insertBlock(block core.Block) (int64, err if insertBlockErr != nil { rollbackErr := tx.Rollback() if rollbackErr != nil { - log.Error("failed to rollback transaction: ", rollbackErr) + logrus.Error("failed to rollback transaction: ", rollbackErr) } return 0, postgres.ErrDBInsertFailed(insertBlockErr) } @@ -167,7 +165,7 @@ func (blockRepository BlockRepository) insertBlock(block core.Block) (int64, err if insertTxErr != nil { rollbackErr := tx.Rollback() if rollbackErr != nil { - log.Warn("failed to rollback transaction: ", rollbackErr) + logrus.Warn("failed to rollback transaction: ", rollbackErr) } return 0, postgres.ErrDBInsertFailed(insertTxErr) } @@ -176,7 +174,7 @@ func (blockRepository BlockRepository) insertBlock(block core.Block) (int64, err if 
commitErr != nil { rollbackErr := tx.Rollback() if rollbackErr != nil { - log.Warn("failed to rollback transaction: ", rollbackErr) + logrus.Warn("failed to rollback transaction: ", rollbackErr) } return 0, commitErr } @@ -268,7 +266,7 @@ func (blockRepository BlockRepository) getBlockHash(block core.Block) (string, b return retrievedBlockHash, blockExists(retrievedBlockHash) } -func (blockRepository BlockRepository) createLogs(tx *sqlx.Tx, logs []core.Log, receiptId int64) error { +func (blockRepository BlockRepository) createLogs(tx *sqlx.Tx, logs []core.FullSyncLog, receiptId int64) error { for _, tlog := range logs { _, err := tx.Exec( `INSERT INTO full_sync_logs (block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data, receipt_id) @@ -305,7 +303,7 @@ func (blockRepository BlockRepository) loadBlock(blockRows *sqlx.Row) (core.Bloc var block b err := blockRows.StructScan(&block) if err != nil { - log.Error("loadBlock: error loading block: ", err) + logrus.Error("loadBlock: error loading block: ", err) return core.Block{}, err } transactionRows, err := blockRepository.database.Queryx(` @@ -323,7 +321,7 @@ func (blockRepository BlockRepository) loadBlock(blockRows *sqlx.Row) (core.Bloc WHERE block_id = $1 ORDER BY hash`, block.ID) if err != nil { - log.Error("loadBlock: error fetting transactions: ", err) + logrus.Error("loadBlock: error fetting transactions: ", err) return core.Block{}, err } block.Transactions = blockRepository.LoadTransactions(transactionRows) @@ -336,7 +334,7 @@ func (blockRepository BlockRepository) LoadTransactions(transactionRows *sqlx.Ro var transaction core.TransactionModel err := transactionRows.StructScan(&transaction) if err != nil { - log.Fatal(err) + logrus.Fatal(err) } transactions = append(transactions, transaction) } diff --git a/pkg/datastore/postgres/repositories/checked_headers_repository.go b/pkg/datastore/postgres/repositories/checked_headers_repository.go new file mode 100644 index 00000000..272a8dae --- 
/dev/null +++ b/pkg/datastore/postgres/repositories/checked_headers_repository.go @@ -0,0 +1,70 @@ +// VulcanizeDB +// Copyright © 2019 Vulcanize + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. + +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . + +package repositories + +import ( + "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" +) + +const ( + insertCheckedHeaderQuery = `INSERT INTO public.checked_headers (header_id) VALUES ($1) + ON CONFLICT (header_id) DO UPDATE + SET check_count = (SELECT check_count FROM public.checked_headers WHERE header_id = $1) + 1` +) + +type CheckedHeadersRepository struct { + db *postgres.DB +} + +func NewCheckedHeadersRepository(db *postgres.DB) CheckedHeadersRepository { + return CheckedHeadersRepository{db: db} +} + +// Adds header_id to the checked_headers table, or increment check_count if header_id already present +func (repo CheckedHeadersRepository) MarkHeaderChecked(headerID int64) error { + _, err := repo.db.Exec(insertCheckedHeaderQuery, headerID) + return err +} + +// Return header_id if not present in checked_headers or its check_count is < passed checkCount +func (repo CheckedHeadersRepository) MissingHeaders(startingBlockNumber, endingBlockNumber, checkCount int64) ([]core.Header, error) { + var result []core.Header + var query string + var err error + + if endingBlockNumber == -1 { + query = `SELECT headers.id, 
headers.block_number, headers.hash FROM headers + LEFT JOIN checked_headers on headers.id = header_id + WHERE (header_id ISNULL OR check_count < $2) + AND headers.block_number >= $1 + AND headers.eth_node_fingerprint = $3 + LIMIT 100` + err = repo.db.Select(&result, query, startingBlockNumber, checkCount, repo.db.Node.ID) + } else { + query = `SELECT headers.id, headers.block_number, headers.hash FROM headers + LEFT JOIN checked_headers on headers.id = header_id + WHERE (header_id ISNULL OR check_count < $3) + AND headers.block_number >= $1 + AND headers.block_number <= $2 + AND headers.eth_node_fingerprint = $4 + LIMIT 100` + err = repo.db.Select(&result, query, startingBlockNumber, endingBlockNumber, checkCount, repo.db.Node.ID) + } + + return result, err +} diff --git a/pkg/datastore/postgres/repositories/checked_headers_repository_test.go b/pkg/datastore/postgres/repositories/checked_headers_repository_test.go new file mode 100644 index 00000000..422d00b3 --- /dev/null +++ b/pkg/datastore/postgres/repositories/checked_headers_repository_test.go @@ -0,0 +1,270 @@ +// VulcanizeDB +// Copyright © 2019 Vulcanize + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. + +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . + +package repositories_test + +import ( + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" + "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories" + "github.com/vulcanize/vulcanizedb/pkg/fakes" + "github.com/vulcanize/vulcanizedb/test_config" + "math/rand" +) + +var _ = Describe("Checked Headers repository", func() { + var ( + db *postgres.DB + repo datastore.CheckedHeadersRepository + ) + + Describe("MarkHeaderChecked", func() { + BeforeEach(func() { + db = test_config.NewTestDB(test_config.NewTestNode()) + test_config.CleanTestDB(db) + repo = repositories.NewCheckedHeadersRepository(db) + }) + + It("marks passed header as checked on insert", func() { + headerRepository := repositories.NewHeaderRepository(db) + headerID, headerErr := headerRepository.CreateOrUpdateHeader(fakes.FakeHeader) + Expect(headerErr).NotTo(HaveOccurred()) + + err := repo.MarkHeaderChecked(headerID) + + Expect(err).NotTo(HaveOccurred()) + var checkedCount int + fetchErr := db.Get(&checkedCount, `SELECT check_count FROM public.checked_headers WHERE header_id = $1`, headerID) + Expect(fetchErr).NotTo(HaveOccurred()) + Expect(checkedCount).To(Equal(1)) + }) + + It("increments check count on update", func() { + headerRepository := repositories.NewHeaderRepository(db) + headerID, headerErr := headerRepository.CreateOrUpdateHeader(fakes.FakeHeader) + Expect(headerErr).NotTo(HaveOccurred()) + + insertErr := repo.MarkHeaderChecked(headerID) + Expect(insertErr).NotTo(HaveOccurred()) + + updateErr := repo.MarkHeaderChecked(headerID) + Expect(updateErr).NotTo(HaveOccurred()) + + var checkedCount int + fetchErr := db.Get(&checkedCount, `SELECT check_count FROM public.checked_headers WHERE header_id = $1`, headerID) + Expect(fetchErr).NotTo(HaveOccurred()) + Expect(checkedCount).To(Equal(2)) + }) + }) + + Describe("MissingHeaders", func() { + var ( + headerRepository datastore.HeaderRepository + 
startingBlockNumber int64 + endingBlockNumber int64 + middleBlockNumber int64 + outOfRangeBlockNumber int64 + blockNumbers []int64 + headerIDs []int64 + err error + uncheckedCheckCount = int64(1) + recheckCheckCount = int64(2) + ) + + BeforeEach(func() { + db = test_config.NewTestDB(test_config.NewTestNode()) + test_config.CleanTestDB(db) + headerRepository = repositories.NewHeaderRepository(db) + repo = repositories.NewCheckedHeadersRepository(db) + + startingBlockNumber = rand.Int63() + middleBlockNumber = startingBlockNumber + 1 + endingBlockNumber = startingBlockNumber + 2 + outOfRangeBlockNumber = endingBlockNumber + 1 + + blockNumbers = []int64{startingBlockNumber, middleBlockNumber, endingBlockNumber, outOfRangeBlockNumber} + + headerIDs = []int64{} + for _, n := range blockNumbers { + headerID, err := headerRepository.CreateOrUpdateHeader(fakes.GetFakeHeader(n)) + headerIDs = append(headerIDs, headerID) + Expect(err).NotTo(HaveOccurred()) + } + }) + + Describe("when ending block is specified", func() { + It("excludes headers that are out of range", func() { + headers, err := repo.MissingHeaders(startingBlockNumber, endingBlockNumber, uncheckedCheckCount) + + Expect(err).NotTo(HaveOccurred()) + // doesn't include outOfRangeBlockNumber + Expect(len(headers)).To(Equal(3)) + Expect(headers[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(middleBlockNumber))) + Expect(headers[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(middleBlockNumber))) + Expect(headers[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(middleBlockNumber))) + }) + + It("excludes headers that have been checked more than the check count", func() { + _, err = db.Exec(`INSERT INTO public.checked_headers (header_id) VALUES ($1)`, headerIDs[1]) + Expect(err).NotTo(HaveOccurred()) + + headers, err := repo.MissingHeaders(startingBlockNumber, endingBlockNumber, uncheckedCheckCount) + + 
Expect(err).NotTo(HaveOccurred()) + // doesn't include middleBlockNumber + Expect(len(headers)).To(Equal(2)) + Expect(headers[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber))) + Expect(headers[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber))) + }) + + It("does not exclude headers that have been checked less than the check count", func() { + _, err = db.Exec(`INSERT INTO public.checked_headers (header_id) VALUES ($1)`, headerIDs[1]) + Expect(err).NotTo(HaveOccurred()) + + headers, err := repo.MissingHeaders(startingBlockNumber, endingBlockNumber, recheckCheckCount) + + Expect(err).NotTo(HaveOccurred()) + Expect(len(headers)).To(Equal(3)) + Expect(headers[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber))) + Expect(headers[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber))) + Expect(headers[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber))) + }) + + It("only returns headers associated with the current node", func() { + dbTwo := test_config.NewTestDB(core.Node{ID: "second"}) + headerRepositoryTwo := repositories.NewHeaderRepository(dbTwo) + repoTwo := repositories.NewCheckedHeadersRepository(dbTwo) + for _, n := range blockNumbers { + _, err = headerRepositoryTwo.CreateOrUpdateHeader(fakes.GetFakeHeader(n + 10)) + Expect(err).NotTo(HaveOccurred()) + } + + Expect(err).NotTo(HaveOccurred()) + nodeOneMissingHeaders, err := repo.MissingHeaders(startingBlockNumber, endingBlockNumber, uncheckedCheckCount) + Expect(err).NotTo(HaveOccurred()) + Expect(len(nodeOneMissingHeaders)).To(Equal(3)) + Expect(nodeOneMissingHeaders[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber))) + Expect(nodeOneMissingHeaders[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber))) + 
Expect(nodeOneMissingHeaders[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber))) + + nodeTwoMissingHeaders, err := repoTwo.MissingHeaders(startingBlockNumber, endingBlockNumber+10, uncheckedCheckCount) + Expect(err).NotTo(HaveOccurred()) + Expect(len(nodeTwoMissingHeaders)).To(Equal(3)) + Expect(nodeTwoMissingHeaders[0].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10))) + Expect(nodeTwoMissingHeaders[1].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10))) + Expect(nodeTwoMissingHeaders[2].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10))) + }) + + It("only returns 100 results to prevent blocking log delegation", func() { + for n := outOfRangeBlockNumber + 1; n < outOfRangeBlockNumber+100; n++ { + _, err := headerRepository.CreateOrUpdateHeader(fakes.GetFakeHeader(n)) + Expect(err).NotTo(HaveOccurred()) + } + + missingHeaders, err := repo.MissingHeaders(startingBlockNumber, endingBlockNumber+200, uncheckedCheckCount) + + Expect(err).NotTo(HaveOccurred()) + Expect(len(missingHeaders)).To(Equal(100)) + }) + }) + + Describe("when ending block is -1", func() { + var endingBlock = int64(-1) + + It("includes all non-checked headers when ending block is -1 ", func() { + headers, err := repo.MissingHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) + + Expect(err).NotTo(HaveOccurred()) + Expect(len(headers)).To(Equal(4)) + Expect(headers[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(middleBlockNumber), Equal(outOfRangeBlockNumber))) + Expect(headers[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(middleBlockNumber), Equal(outOfRangeBlockNumber))) + Expect(headers[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(middleBlockNumber), 
Equal(outOfRangeBlockNumber))) + Expect(headers[3].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(middleBlockNumber), Equal(outOfRangeBlockNumber))) + }) + + It("excludes headers that have been checked more than the check count", func() { + _, err = db.Exec(`INSERT INTO public.checked_headers (header_id) VALUES ($1)`, headerIDs[1]) + Expect(err).NotTo(HaveOccurred()) + + headers, err := repo.MissingHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) + + Expect(err).NotTo(HaveOccurred()) + // doesn't include middleBlockNumber + Expect(len(headers)).To(Equal(3)) + Expect(headers[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) + Expect(headers[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) + Expect(headers[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) + }) + + It("does not exclude headers that have been checked less than the check count", func() { + _, err = db.Exec(`INSERT INTO public.checked_headers (header_id) VALUES ($1)`, headerIDs[1]) + Expect(err).NotTo(HaveOccurred()) + + headers, err := repo.MissingHeaders(startingBlockNumber, endingBlock, recheckCheckCount) + + Expect(err).NotTo(HaveOccurred()) + Expect(len(headers)).To(Equal(4)) + Expect(headers[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) + Expect(headers[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) + Expect(headers[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) + Expect(headers[3].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) + }) + + It("only 
returns headers associated with the current node", func() { + dbTwo := test_config.NewTestDB(core.Node{ID: "second"}) + headerRepositoryTwo := repositories.NewHeaderRepository(dbTwo) + repoTwo := repositories.NewCheckedHeadersRepository(dbTwo) + for _, n := range blockNumbers { + _, err = headerRepositoryTwo.CreateOrUpdateHeader(fakes.GetFakeHeader(n + 10)) + Expect(err).NotTo(HaveOccurred()) + } + + Expect(err).NotTo(HaveOccurred()) + nodeOneMissingHeaders, err := repo.MissingHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) + Expect(err).NotTo(HaveOccurred()) + Expect(len(nodeOneMissingHeaders)).To(Equal(4)) + Expect(nodeOneMissingHeaders[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) + Expect(nodeOneMissingHeaders[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) + Expect(nodeOneMissingHeaders[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) + Expect(nodeOneMissingHeaders[3].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) + + nodeTwoMissingHeaders, err := repoTwo.MissingHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) + Expect(err).NotTo(HaveOccurred()) + Expect(len(nodeTwoMissingHeaders)).To(Equal(4)) + Expect(nodeTwoMissingHeaders[0].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10), Equal(outOfRangeBlockNumber+10))) + Expect(nodeTwoMissingHeaders[1].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10), Equal(outOfRangeBlockNumber+10))) + Expect(nodeTwoMissingHeaders[2].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10), 
Equal(outOfRangeBlockNumber+10))) + Expect(nodeTwoMissingHeaders[3].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10), Equal(outOfRangeBlockNumber+10))) + }) + + It("only returns 100 results to prevent blocking log delegation", func() { + for n := outOfRangeBlockNumber + 1; n < outOfRangeBlockNumber+100; n++ { + _, err := headerRepository.CreateOrUpdateHeader(fakes.GetFakeHeader(n)) + Expect(err).NotTo(HaveOccurred()) + } + + missingHeaders, err := repo.MissingHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) + + Expect(err).NotTo(HaveOccurred()) + Expect(len(missingHeaders)).To(Equal(100)) + }) + }) + + }) +}) diff --git a/pkg/datastore/postgres/repositories/logs_repository.go b/pkg/datastore/postgres/repositories/full_sync_log_repository.go similarity index 81% rename from pkg/datastore/postgres/repositories/logs_repository.go rename to pkg/datastore/postgres/repositories/full_sync_log_repository.go index 34cdddf4..4c8c1d4f 100644 --- a/pkg/datastore/postgres/repositories/logs_repository.go +++ b/pkg/datastore/postgres/repositories/full_sync_log_repository.go @@ -17,20 +17,18 @@ package repositories import ( - "github.com/sirupsen/logrus" - "database/sql" - + "github.com/sirupsen/logrus" "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" ) -type LogRepository struct { +type FullSyncLogRepository struct { *postgres.DB } -func (logRepository LogRepository) CreateLogs(lgs []core.Log, receiptId int64) error { - tx, _ := logRepository.DB.Beginx() +func (repository FullSyncLogRepository) CreateLogs(lgs []core.FullSyncLog, receiptId int64) error { + tx, _ := repository.DB.Beginx() for _, tlog := range lgs { _, insertLogErr := tx.Exec( `INSERT INTO full_sync_logs (block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data, receipt_id) @@ -57,8 +55,8 @@ func (logRepository LogRepository) CreateLogs(lgs []core.Log, receiptId int64) e 
return nil } -func (logRepository LogRepository) GetLogs(address string, blockNumber int64) ([]core.Log, error) { - logRows, err := logRepository.DB.Query( +func (repository FullSyncLogRepository) GetLogs(address string, blockNumber int64) ([]core.FullSyncLog, error) { + logRows, err := repository.DB.Query( `SELECT block_number, address, tx_hash, @@ -72,13 +70,13 @@ func (logRepository LogRepository) GetLogs(address string, blockNumber int64) ([ WHERE address = $1 AND block_number = $2 ORDER BY block_number DESC`, address, blockNumber) if err != nil { - return []core.Log{}, err + return []core.FullSyncLog{}, err } - return logRepository.loadLogs(logRows) + return repository.loadLogs(logRows) } -func (logRepository LogRepository) loadLogs(logsRows *sql.Rows) ([]core.Log, error) { - var lgs []core.Log +func (repository FullSyncLogRepository) loadLogs(logsRows *sql.Rows) ([]core.FullSyncLog, error) { + var lgs []core.FullSyncLog for logsRows.Next() { var blockNumber int64 var address string @@ -89,9 +87,9 @@ func (logRepository LogRepository) loadLogs(logsRows *sql.Rows) ([]core.Log, err err := logsRows.Scan(&blockNumber, &address, &txHash, &index, &topics[0], &topics[1], &topics[2], &topics[3], &data) if err != nil { logrus.Error("loadLogs: Error scanning a row in logRows: ", err) - return []core.Log{}, err + return []core.FullSyncLog{}, err } - lg := core.Log{ + lg := core.FullSyncLog{ BlockNumber: blockNumber, TxHash: txHash, Address: address, diff --git a/pkg/datastore/postgres/repositories/logs_repository_test.go b/pkg/datastore/postgres/repositories/full_sync_log_repository_test.go similarity index 95% rename from pkg/datastore/postgres/repositories/logs_repository_test.go rename to pkg/datastore/postgres/repositories/full_sync_log_repository_test.go index c6544aa4..981c242a 100644 --- a/pkg/datastore/postgres/repositories/logs_repository_test.go +++ b/pkg/datastore/postgres/repositories/full_sync_log_repository_test.go @@ -29,11 +29,11 @@ import ( 
"github.com/vulcanize/vulcanizedb/test_config" ) -var _ = Describe("Logs Repository", func() { +var _ = Describe("Full sync log Repository", func() { Describe("Saving logs", func() { var db *postgres.DB var blockRepository datastore.BlockRepository - var logsRepository datastore.LogRepository + var logsRepository datastore.FullSyncLogRepository var receiptRepository datastore.FullSyncReceiptRepository var node core.Node @@ -47,7 +47,7 @@ var _ = Describe("Logs Repository", func() { db = test_config.NewTestDB(node) test_config.CleanTestDB(db) blockRepository = repositories.NewBlockRepository(db) - logsRepository = repositories.LogRepository{DB: db} + logsRepository = repositories.FullSyncLogRepository{DB: db} receiptRepository = repositories.FullSyncReceiptRepository{DB: db} }) @@ -59,7 +59,7 @@ var _ = Describe("Logs Repository", func() { receiptId, err := receiptRepository.CreateFullSyncReceiptInTx(blockId, core.Receipt{}, tx) tx.Commit() Expect(err).NotTo(HaveOccurred()) - err = logsRepository.CreateLogs([]core.Log{{ + err = logsRepository.CreateLogs([]core.FullSyncLog{{ BlockNumber: blockNumber, Index: 0, Address: "x123", @@ -98,7 +98,7 @@ var _ = Describe("Logs Repository", func() { tx.Commit() Expect(err).NotTo(HaveOccurred()) - err = logsRepository.CreateLogs([]core.Log{{ + err = logsRepository.CreateLogs([]core.FullSyncLog{{ BlockNumber: blockNumber, Index: 0, Address: "x123", @@ -108,7 +108,7 @@ var _ = Describe("Logs Repository", func() { }}, receiptId) Expect(err).NotTo(HaveOccurred()) - err = logsRepository.CreateLogs([]core.Log{{ + err = logsRepository.CreateLogs([]core.FullSyncLog{{ BlockNumber: blockNumber, Index: 1, Address: "x123", @@ -118,7 +118,7 @@ var _ = Describe("Logs Repository", func() { }}, receiptId) Expect(err).NotTo(HaveOccurred()) - err = logsRepository.CreateLogs([]core.Log{{ + err = logsRepository.CreateLogs([]core.FullSyncLog{{ BlockNumber: 2, Index: 0, Address: "x123", @@ -162,7 +162,7 @@ var _ = Describe("Logs Repository", func() { 
It("saves the logs attached to a receipt", func() { - logs := []core.Log{{ + logs := []core.FullSyncLog{{ Address: "0x8a4774fe82c63484afef97ca8d89a6ea5e21f973", BlockNumber: 4745407, Data: "0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000645a68669900000000000000000000000000000000000000000000003397684ab5869b0000000000000000000000000000000000000000000000000000000000005a36053200000000000000000000000099041f808d598b782d5a3e498681c2452a31da08", diff --git a/pkg/datastore/postgres/repositories/full_sync_receipt_repository.go b/pkg/datastore/postgres/repositories/full_sync_receipt_repository.go index 50eb86be..0932dd3f 100644 --- a/pkg/datastore/postgres/repositories/full_sync_receipt_repository.go +++ b/pkg/datastore/postgres/repositories/full_sync_receipt_repository.go @@ -20,7 +20,6 @@ import ( "database/sql" "github.com/jmoiron/sqlx" "github.com/sirupsen/logrus" - "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/datastore" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" @@ -53,7 +52,22 @@ func (receiptRepository FullSyncReceiptRepository) CreateReceiptsAndLogs(blockId return nil } -func createLogs(logs []core.Log, receiptId int64, tx *sqlx.Tx) error { +func createReceipt(receipt core.Receipt, blockId int64, tx *sqlx.Tx) (int64, error) { + var receiptId int64 + err := tx.QueryRow( + `INSERT INTO full_sync_receipts + (contract_address, tx_hash, cumulative_gas_used, gas_used, state_root, status, block_id) + VALUES ($1, $2, $3, $4, $5, $6, $7) + RETURNING id`, + receipt.ContractAddress, receipt.TxHash, receipt.CumulativeGasUsed, receipt.GasUsed, receipt.StateRoot, receipt.Status, blockId, + ).Scan(&receiptId) + if err != nil { + logrus.Error("createReceipt: Error inserting: ", err) + } + return receiptId, err +} + +func createLogs(logs []core.FullSyncLog, receiptId int64, tx *sqlx.Tx) 
error { for _, log := range logs { _, err := tx.Exec( `INSERT INTO full_sync_logs (block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data, receipt_id) diff --git a/pkg/datastore/postgres/repositories/full_sync_receipt_repository_test.go b/pkg/datastore/postgres/repositories/full_sync_receipt_repository_test.go index 0232cc99..ed84c381 100644 --- a/pkg/datastore/postgres/repositories/full_sync_receipt_repository_test.go +++ b/pkg/datastore/postgres/repositories/full_sync_receipt_repository_test.go @@ -29,7 +29,7 @@ import ( var _ = Describe("Receipt Repository", func() { var blockRepository datastore.BlockRepository - var logRepository datastore.LogRepository + var logRepository datastore.FullSyncLogRepository var receiptRepository datastore.FullSyncReceiptRepository var db *postgres.DB var node core.Node @@ -43,7 +43,7 @@ var _ = Describe("Receipt Repository", func() { db = test_config.NewTestDB(node) test_config.CleanTestDB(db) blockRepository = repositories.NewBlockRepository(db) - logRepository = repositories.LogRepository{DB: db} + logRepository = repositories.FullSyncLogRepository{DB: db} receiptRepository = repositories.FullSyncReceiptRepository{DB: db} }) @@ -56,7 +56,7 @@ var _ = Describe("Receipt Repository", func() { txHashTwo := "0xTxHashTwo" addressOne := "0xAddressOne" addressTwo := "0xAddressTwo" - logsOne := []core.Log{{ + logsOne := []core.FullSyncLog{{ Address: addressOne, BlockNumber: blockNumber, TxHash: txHashOne, @@ -65,7 +65,7 @@ var _ = Describe("Receipt Repository", func() { BlockNumber: blockNumber, TxHash: txHashOne, }} - logsTwo := []core.Log{{ + logsTwo := []core.FullSyncLog{{ BlockNumber: blockNumber, TxHash: txHashTwo, Address: addressTwo, @@ -112,7 +112,7 @@ var _ = Describe("Receipt Repository", func() { ContractAddress: "0xde0b295669a9fd93d5f28d9ec85e40f4cb697bae", CumulativeGasUsed: 7996119, GasUsed: 21000, - Logs: []core.Log{}, + Logs: []core.FullSyncLog{}, StateRoot: 
"0x88abf7e73128227370aa7baa3dd4e18d0af70e92ef1f9ef426942fbe2dddb733", Status: 1, TxHash: "0xe340558980f89d5f86045ac11e5cc34e4bcec20f9f1e2a427aa39d87114e8223", diff --git a/pkg/datastore/postgres/repositories/header_sync_log_repository.go b/pkg/datastore/postgres/repositories/header_sync_log_repository.go new file mode 100644 index 00000000..fef7e6b3 --- /dev/null +++ b/pkg/datastore/postgres/repositories/header_sync_log_repository.go @@ -0,0 +1,134 @@ +// VulcanizeDB +// Copyright © 2019 Vulcanize + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. + +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . 
+ +package repositories + +import ( + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/types" + "github.com/jmoiron/sqlx" + "github.com/lib/pq" + "github.com/sirupsen/logrus" + "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" +) + +const insertHeaderSyncLogQuery = `INSERT INTO header_sync_logs + (header_id, address, topics, data, block_number, block_hash, tx_index, tx_hash, log_index, raw) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) ON CONFLICT DO NOTHING` + +type HeaderSyncLogRepository struct { + db *postgres.DB +} + +func NewHeaderSyncLogRepository(db *postgres.DB) HeaderSyncLogRepository { + return HeaderSyncLogRepository{db: db} +} + +type headerSyncLog struct { + ID int64 + HeaderID int64 `db:"header_id"` + Address string + Topics pq.ByteaArray + Data []byte + BlockNumber uint64 `db:"block_number"` + BlockHash string `db:"block_hash"` + TxHash string `db:"tx_hash"` + TxIndex uint `db:"tx_index"` + LogIndex uint `db:"log_index"` + Transformed bool + Raw []byte +} + +func (repository HeaderSyncLogRepository) GetUntransformedHeaderSyncLogs() ([]core.HeaderSyncLog, error) { + rows, queryErr := repository.db.Queryx(`SELECT * FROM public.header_sync_logs WHERE transformed = false`) + if queryErr != nil { + return nil, queryErr + } + + var results []core.HeaderSyncLog + for rows.Next() { + var rawLog headerSyncLog + scanErr := rows.StructScan(&rawLog) + if scanErr != nil { + return nil, scanErr + } + var logTopics []common.Hash + for _, topic := range rawLog.Topics { + logTopics = append(logTopics, common.BytesToHash(topic)) + } + reconstructedLog := types.Log{ + Address: common.HexToAddress(rawLog.Address), + Topics: logTopics, + Data: rawLog.Data, + BlockNumber: rawLog.BlockNumber, + TxHash: common.HexToHash(rawLog.TxHash), + TxIndex: rawLog.TxIndex, + BlockHash: common.HexToHash(rawLog.BlockHash), + Index: rawLog.LogIndex, + // TODO: revisit if not cascade deleting logs 
when header removed + // currently, fetched logs are cascade deleted if removed + Removed: false, + } + result := core.HeaderSyncLog{ + ID: rawLog.ID, + HeaderID: rawLog.HeaderID, + Log: reconstructedLog, + Transformed: rawLog.Transformed, + } + // TODO: Consider returning each result async to avoid keeping large result sets in memory + results = append(results, result) + } + + return results, nil +} + +func (repository HeaderSyncLogRepository) CreateHeaderSyncLogs(headerID int64, logs []types.Log) error { + tx, txErr := repository.db.Beginx() + if txErr != nil { + return txErr + } + for _, log := range logs { + err := insertLog(headerID, log, tx) + if err != nil { + rollbackErr := tx.Rollback() + if rollbackErr != nil { + logrus.Errorf("failed to rollback header sync log insert: %s", rollbackErr.Error()) + } + return err + } + } + return tx.Commit() +} + +func insertLog(headerID int64, log types.Log, tx *sqlx.Tx) error { + topics := buildTopics(log) + raw, jsonErr := log.MarshalJSON() + if jsonErr != nil { + return jsonErr + } + _, insertErr := tx.Exec(insertHeaderSyncLogQuery, headerID, log.Address.Hex(), topics, log.Data, log.BlockNumber, + log.BlockHash.Hex(), log.TxIndex, log.TxHash.Hex(), log.Index, raw) + return insertErr +} + +func buildTopics(log types.Log) pq.ByteaArray { + var topics pq.ByteaArray + for _, topic := range log.Topics { + topics = append(topics, topic.Bytes()) + } + return topics +} diff --git a/pkg/datastore/postgres/repositories/header_sync_log_repository_test.go b/pkg/datastore/postgres/repositories/header_sync_log_repository_test.go new file mode 100644 index 00000000..2891b6ba --- /dev/null +++ b/pkg/datastore/postgres/repositories/header_sync_log_repository_test.go @@ -0,0 +1,203 @@ +// VulcanizeDB +// Copyright © 2019 Vulcanize + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of 
the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. + +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . + +package repositories_test + +import ( + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/types" + "github.com/lib/pq" + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + "github.com/vulcanize/vulcanizedb/libraries/shared/test_data" + "github.com/vulcanize/vulcanizedb/pkg/datastore" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories" + "github.com/vulcanize/vulcanizedb/pkg/fakes" + "github.com/vulcanize/vulcanizedb/test_config" +) + +var _ = Describe("Header sync log repository", func() { + var ( + db *postgres.DB + headerID int64 + repository datastore.HeaderSyncLogRepository + ) + + BeforeEach(func() { + db = test_config.NewTestDB(test_config.NewTestNode()) + test_config.CleanTestDB(db) + headerRepository := repositories.NewHeaderRepository(db) + var headerErr error + headerID, headerErr = headerRepository.CreateOrUpdateHeader(fakes.FakeHeader) + Expect(headerErr).NotTo(HaveOccurred()) + repository = repositories.NewHeaderSyncLogRepository(db) + }) + + Describe("CreateHeaderSyncLogs", func() { + type HeaderSyncLog struct { + ID int64 + HeaderID int64 `db:"header_id"` + Address string + Topics pq.ByteaArray + Data []byte + BlockNumber uint64 `db:"block_number"` + BlockHash string `db:"block_hash"` + TxHash string `db:"tx_hash"` + TxIndex uint `db:"tx_index"` + LogIndex uint `db:"log_index"` + Transformed bool + Raw []byte + } + + It("writes a log to the db", func() { + log := test_data.GenericTestLog() + + err := 
repository.CreateHeaderSyncLogs(headerID, []types.Log{log}) + + Expect(err).NotTo(HaveOccurred()) + var dbLog HeaderSyncLog + lookupErr := db.Get(&dbLog, `SELECT * FROM header_sync_logs`) + Expect(lookupErr).NotTo(HaveOccurred()) + Expect(dbLog.ID).NotTo(BeZero()) + Expect(dbLog.HeaderID).To(Equal(headerID)) + Expect(dbLog.Address).To(Equal(log.Address.Hex())) + Expect(dbLog.Topics[0]).To(Equal(log.Topics[0].Bytes())) + Expect(dbLog.Topics[1]).To(Equal(log.Topics[1].Bytes())) + Expect(dbLog.Data).To(Equal(log.Data)) + Expect(dbLog.BlockNumber).To(Equal(log.BlockNumber)) + Expect(dbLog.BlockHash).To(Equal(log.BlockHash.Hex())) + Expect(dbLog.TxIndex).To(Equal(log.TxIndex)) + Expect(dbLog.TxHash).To(Equal(log.TxHash.Hex())) + Expect(dbLog.LogIndex).To(Equal(log.Index)) + expectedRaw, jsonErr := log.MarshalJSON() + Expect(jsonErr).NotTo(HaveOccurred()) + Expect(dbLog.Raw).To(MatchJSON(expectedRaw)) + Expect(dbLog.Transformed).To(BeFalse()) + }) + + It("writes several logs to the db", func() { + log1 := test_data.GenericTestLog() + log2 := test_data.GenericTestLog() + logs := []types.Log{log1, log2} + + err := repository.CreateHeaderSyncLogs(headerID, logs) + + Expect(err).NotTo(HaveOccurred()) + var count int + lookupErr := db.Get(&count, `SELECT COUNT(*) FROM header_sync_logs`) + Expect(lookupErr).NotTo(HaveOccurred()) + Expect(count).To(Equal(len(logs))) + }) + + It("persists record that can be unpacked into types.Log", func() { + // important if we want to decouple log persistence from transforming and still make use of + // tools on types.Log like abi.Unpack + log := test_data.GenericTestLog() + + err := repository.CreateHeaderSyncLogs(headerID, []types.Log{log}) + + Expect(err).NotTo(HaveOccurred()) + var dbLog HeaderSyncLog + lookupErr := db.Get(&dbLog, `SELECT * FROM header_sync_logs`) + Expect(lookupErr).NotTo(HaveOccurred()) + + var logTopics []common.Hash + for _, topic := range dbLog.Topics { + logTopics = append(logTopics, common.BytesToHash(topic)) + } + 
+ reconstructedLog := types.Log{ + Address: common.HexToAddress(dbLog.Address), + Topics: logTopics, + Data: dbLog.Data, + BlockNumber: dbLog.BlockNumber, + TxHash: common.HexToHash(dbLog.TxHash), + TxIndex: dbLog.TxIndex, + BlockHash: common.HexToHash(dbLog.BlockHash), + Index: dbLog.LogIndex, + Removed: false, + } + Expect(reconstructedLog).To(Equal(log)) + }) + + It("does not duplicate logs", func() { + log := test_data.GenericTestLog() + + err := repository.CreateHeaderSyncLogs(headerID, []types.Log{log, log}) + + Expect(err).NotTo(HaveOccurred()) + var count int + lookupErr := db.Get(&count, `SELECT COUNT(*) FROM header_sync_logs`) + Expect(lookupErr).NotTo(HaveOccurred()) + Expect(count).To(Equal(1)) + }) + }) + + Describe("GetFullSyncLogs", func() { + Describe("when there are no logs", func() { + It("returns empty collection", func() { + result, err := repository.GetUntransformedHeaderSyncLogs() + + Expect(err).NotTo(HaveOccurred()) + Expect(len(result)).To(BeZero()) + }) + }) + + Describe("when there are logs", func() { + var log1, log2 types.Log + + BeforeEach(func() { + log1 = test_data.GenericTestLog() + log2 = test_data.GenericTestLog() + logs := []types.Log{log1, log2} + logsErr := repository.CreateHeaderSyncLogs(headerID, logs) + Expect(logsErr).NotTo(HaveOccurred()) + }) + + It("returns persisted logs", func() { + result, err := repository.GetUntransformedHeaderSyncLogs() + + Expect(err).NotTo(HaveOccurred()) + Expect(len(result)).To(Equal(2)) + Expect(result[0].Log).To(Or(Equal(log1), Equal(log2))) + Expect(result[1].Log).To(Or(Equal(log1), Equal(log2))) + Expect(result[0].Log).NotTo(Equal(result[1].Log)) + }) + + It("excludes logs that have been transformed", func() { + _, insertErr := db.Exec(`UPDATE public.header_sync_logs SET transformed = true WHERE tx_hash = $1`, log1.TxHash.Hex()) + Expect(insertErr).NotTo(HaveOccurred()) + + result, err := repository.GetUntransformedHeaderSyncLogs() + + Expect(err).NotTo(HaveOccurred()) + 
Expect(len(result)).To(Equal(1)) + Expect(result[0].Log).To(Equal(log2)) + }) + + It("returns empty collection if all logs transformed", func() { + _, insertErr := db.Exec(`UPDATE public.header_sync_logs SET transformed = true WHERE header_id = $1`, headerID) + Expect(insertErr).NotTo(HaveOccurred()) + + result, err := repository.GetUntransformedHeaderSyncLogs() + + Expect(err).NotTo(HaveOccurred()) + Expect(len(result)).To(BeZero()) + }) + }) + }) +}) diff --git a/pkg/datastore/postgres/repositories/watched_events_repository_test.go b/pkg/datastore/postgres/repositories/watched_events_repository_test.go index 23ff8af6..d639381e 100644 --- a/pkg/datastore/postgres/repositories/watched_events_repository_test.go +++ b/pkg/datastore/postgres/repositories/watched_events_repository_test.go @@ -32,7 +32,7 @@ var _ = Describe("Watched Events Repository", func() { var db *postgres.DB var blocksRepository datastore.BlockRepository var filterRepository datastore.FilterRepository - var logRepository datastore.LogRepository + var logRepository datastore.FullSyncLogRepository var receiptRepository datastore.FullSyncReceiptRepository var watchedEventRepository datastore.WatchedEventRepository @@ -41,7 +41,7 @@ var _ = Describe("Watched Events Repository", func() { test_config.CleanTestDB(db) blocksRepository = repositories.NewBlockRepository(db) filterRepository = repositories.FilterRepository{DB: db} - logRepository = repositories.LogRepository{DB: db} + logRepository = repositories.FullSyncLogRepository{DB: db} receiptRepository = repositories.FullSyncReceiptRepository{DB: db} watchedEventRepository = repositories.WatchedEventRepository{DB: db} }) @@ -54,7 +54,7 @@ var _ = Describe("Watched Events Repository", func() { Address: "0x123", Topics: core.Topics{0: "event1=10", 2: "event3=hello"}, } - logs := []core.Log{ + logs := []core.FullSyncLog{ { BlockNumber: 0, TxHash: "0x1", @@ -108,7 +108,7 @@ var _ = Describe("Watched Events Repository", func() { Address: "0x123", Topics: 
core.Topics{0: "event1=10", 2: "event3=hello"}, } - logs := []core.Log{ + logs := []core.FullSyncLog{ { BlockNumber: 0, TxHash: "0x1", diff --git a/pkg/datastore/repository.go b/pkg/datastore/repository.go index fe8bad9c..2fbeaf9e 100644 --- a/pkg/datastore/repository.go +++ b/pkg/datastore/repository.go @@ -17,6 +17,7 @@ package datastore import ( + "github.com/ethereum/go-ethereum/core/types" "github.com/jmoiron/sqlx" "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/filters" @@ -33,6 +34,11 @@ type BlockRepository interface { SetBlocksStatus(chainHead int64) error } +type CheckedHeadersRepository interface { + MarkHeaderChecked(headerID int64) error + MissingHeaders(startingBlockNumber, endingBlockNumber, checkCount int64) ([]core.Header, error) +} + type ContractRepository interface { CreateContract(contract core.Contract) error GetContract(contractHash string) (core.Contract, error) @@ -44,6 +50,11 @@ type FilterRepository interface { GetFilter(name string) (filters.LogFilter, error) } +type FullSyncLogRepository interface { + CreateLogs(logs []core.FullSyncLog, receiptId int64) error + GetLogs(address string, blockNumber int64) ([]core.FullSyncLog, error) +} + type HeaderRepository interface { CreateOrUpdateHeader(header core.Header) (int64, error) CreateTransactions(headerID int64, transactions []core.TransactionModel) error @@ -51,9 +62,9 @@ type HeaderRepository interface { MissingBlockNumbers(startingBlockNumber, endingBlockNumber int64, nodeID string) ([]int64, error) } -type LogRepository interface { - CreateLogs(logs []core.Log, receiptId int64) error - GetLogs(address string, blockNumber int64) ([]core.Log, error) +type HeaderSyncLogRepository interface { + GetUntransformedHeaderSyncLogs() ([]core.HeaderSyncLog, error) + CreateHeaderSyncLogs(headerID int64, logs []types.Log) error } type FullSyncReceiptRepository interface { diff --git a/pkg/fakes/mock_blockchain.go b/pkg/fakes/mock_blockchain.go index 
8ac7f825..53704915 100644 --- a/pkg/fakes/mock_blockchain.go +++ b/pkg/fakes/mock_blockchain.go @@ -107,8 +107,8 @@ func (chain *MockBlockChain) GetHeadersByNumbers(blockNumbers []int64) ([]core.H return headers, nil } -func (chain *MockBlockChain) GetLogs(contract core.Contract, startingBlockNumber, endingBlockNumber *big.Int) ([]core.Log, error) { - return []core.Log{}, nil +func (chain *MockBlockChain) GetFullSyncLogs(contract core.Contract, startingBlockNumber, endingBlockNumber *big.Int) ([]core.FullSyncLog, error) { + return []core.FullSyncLog{}, nil } func (chain *MockBlockChain) GetTransactions(transactionHashes []common.Hash) ([]core.TransactionModel, error) { diff --git a/pkg/fakes/mock_checked_headers_repository.go b/pkg/fakes/mock_checked_headers_repository.go new file mode 100644 index 00000000..4e574b07 --- /dev/null +++ b/pkg/fakes/mock_checked_headers_repository.go @@ -0,0 +1,43 @@ +// VulcanizeDB +// Copyright © 2019 Vulcanize + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. + +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . 
+ +package fakes + +import ( + "github.com/vulcanize/vulcanizedb/pkg/core" +) + +type MockCheckedHeadersRepository struct { + CheckCount int64 + StartingBlockNumber int64 + EndingBlockNumber int64 + HeaderID int64 + ReturnHeaders []core.Header + MarkHeaderCheckedReturnError error + MissingHeadersReturnError error +} + +func (repository *MockCheckedHeadersRepository) MarkHeaderChecked(headerID int64) error { + repository.HeaderID = headerID + return repository.MarkHeaderCheckedReturnError +} + +func (repository *MockCheckedHeadersRepository) MissingHeaders(startingBlockNumber, endingBlockNumber, checkCount int64) ([]core.Header, error) { + repository.StartingBlockNumber = startingBlockNumber + repository.EndingBlockNumber = endingBlockNumber + repository.CheckCount = checkCount + return repository.ReturnHeaders, repository.MissingHeadersReturnError +} diff --git a/pkg/fakes/mock_header_sync_log_repository.go b/pkg/fakes/mock_header_sync_log_repository.go new file mode 100644 index 00000000..5fca524b --- /dev/null +++ b/pkg/fakes/mock_header_sync_log_repository.go @@ -0,0 +1,42 @@ +// VulcanizeDB +// Copyright © 2019 Vulcanize + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. + +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . 
+ +package fakes + +import ( + "github.com/ethereum/go-ethereum/core/types" + "github.com/vulcanize/vulcanizedb/pkg/core" +) + +type MockHeaderSyncLogRepository struct { + CreateError error + GetCalled bool + GetError error + PassedHeaderID int64 + PassedLogs []types.Log + ReturnLogs []core.HeaderSyncLog +} + +func (repository *MockHeaderSyncLogRepository) GetUntransformedHeaderSyncLogs() ([]core.HeaderSyncLog, error) { + repository.GetCalled = true + return repository.ReturnLogs, repository.GetError +} + +func (repository *MockHeaderSyncLogRepository) CreateHeaderSyncLogs(headerID int64, logs []types.Log) error { + repository.PassedHeaderID = headerID + repository.PassedLogs = logs + return repository.CreateError +} diff --git a/pkg/geth/blockchain.go b/pkg/geth/blockchain.go index 5274fd3e..f834654a 100644 --- a/pkg/geth/blockchain.go +++ b/pkg/geth/blockchain.go @@ -86,7 +86,7 @@ func (blockChain *BlockChain) GetHeadersByNumbers(blockNumbers []int64) (header return blockChain.getPOWHeaders(blockNumbers) } -func (blockChain *BlockChain) GetLogs(contract core.Contract, startingBlockNumber, endingBlockNumber *big.Int) ([]core.Log, error) { +func (blockChain *BlockChain) GetFullSyncLogs(contract core.Contract, startingBlockNumber, endingBlockNumber *big.Int) ([]core.FullSyncLog, error) { if endingBlockNumber == nil { endingBlockNumber = startingBlockNumber } @@ -99,9 +99,9 @@ func (blockChain *BlockChain) GetLogs(contract core.Contract, startingBlockNumbe } gethLogs, err := blockChain.GetEthLogsWithCustomQuery(fc) if err != nil { - return []core.Log{}, err + return []core.FullSyncLog{}, err } - logs := vulcCommon.ToCoreLogs(gethLogs) + logs := vulcCommon.ToFullSyncLogs(gethLogs) return logs, nil } diff --git a/pkg/geth/blockchain_test.go b/pkg/geth/blockchain_test.go index e0a8e9c2..21b71cf3 100644 --- a/pkg/geth/blockchain_test.go +++ b/pkg/geth/blockchain_test.go @@ -154,7 +154,7 @@ var _ = Describe("Geth blockchain", func() { startingBlockNumber := big.NewInt(1) 
endingBlockNumber := big.NewInt(2) - _, err := blockChain.GetLogs(contract, startingBlockNumber, endingBlockNumber) + _, err := blockChain.GetFullSyncLogs(contract, startingBlockNumber, endingBlockNumber) Expect(err).NotTo(HaveOccurred()) expectedQuery := ethereum.FilterQuery{ @@ -171,7 +171,7 @@ var _ = Describe("Geth blockchain", func() { startingBlockNumber := big.NewInt(1) endingBlockNumber := big.NewInt(2) - _, err := blockChain.GetLogs(contract, startingBlockNumber, endingBlockNumber) + _, err := blockChain.GetFullSyncLogs(contract, startingBlockNumber, endingBlockNumber) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) diff --git a/pkg/geth/converters/common/log_converter.go b/pkg/geth/converters/common/full_sync_log_converter.go similarity index 89% rename from pkg/geth/converters/common/log_converter.go rename to pkg/geth/converters/common/full_sync_log_converter.go index 8904a720..df6805d4 100644 --- a/pkg/geth/converters/common/log_converter.go +++ b/pkg/geth/converters/common/full_sync_log_converter.go @@ -26,8 +26,8 @@ import ( "github.com/vulcanize/vulcanizedb/pkg/core" ) -func ToCoreLogs(gethLogs []types.Log) []core.Log { - var logs []core.Log +func ToFullSyncLogs(gethLogs []types.Log) []core.FullSyncLog { + var logs []core.FullSyncLog for _, log := range gethLogs { log := ToCoreLog(log) logs = append(logs, log) @@ -43,10 +43,10 @@ func makeTopics(topics []common.Hash) core.Topics { return hexTopics } -func ToCoreLog(gethLog types.Log) core.Log { +func ToCoreLog(gethLog types.Log) core.FullSyncLog { topics := gethLog.Topics hexTopics := makeTopics(topics) - return core.Log{ + return core.FullSyncLog{ Address: strings.ToLower(gethLog.Address.Hex()), BlockNumber: int64(gethLog.BlockNumber), Topics: hexTopics, diff --git a/pkg/geth/converters/common/log_converter_test.go b/pkg/geth/converters/common/full_sync_log_converter_test.go similarity index 95% rename from pkg/geth/converters/common/log_converter_test.go rename to 
pkg/geth/converters/common/full_sync_log_converter_test.go index 6a5505cb..fb0ad9c2 100644 --- a/pkg/geth/converters/common/log_converter_test.go +++ b/pkg/geth/converters/common/full_sync_log_converter_test.go @@ -29,7 +29,7 @@ import ( vulcCommon "github.com/vulcanize/vulcanizedb/pkg/geth/converters/common" ) -var _ = Describe("Conversion of GethLog to core.Log", func() { +var _ = Describe("Conversion of GethLog to core.FullSyncLog", func() { It("converts geth log to internal log format", func() { gethLog := types.Log{ @@ -46,7 +46,7 @@ var _ = Describe("Conversion of GethLog to core.Log", func() { }, } - expected := core.Log{ + expected := core.FullSyncLog{ Address: strings.ToLower(gethLog.Address.Hex()), BlockNumber: int64(gethLog.BlockNumber), Data: hexutil.Encode(gethLog.Data), @@ -101,7 +101,7 @@ var _ = Describe("Conversion of GethLog to core.Log", func() { expectedOne := vulcCommon.ToCoreLog(gethLogOne) expectedTwo := vulcCommon.ToCoreLog(gethLogTwo) - coreLogs := vulcCommon.ToCoreLogs([]types.Log{gethLogOne, gethLogTwo}) + coreLogs := vulcCommon.ToFullSyncLogs([]types.Log{gethLogOne, gethLogTwo}) Expect(len(coreLogs)).To(Equal(2)) Expect(coreLogs[0]).To(Equal(expectedOne)) diff --git a/pkg/geth/converters/common/receipt_converter.go b/pkg/geth/converters/common/receipt_converter.go index 50b365d3..880664bd 100644 --- a/pkg/geth/converters/common/receipt_converter.go +++ b/pkg/geth/converters/common/receipt_converter.go @@ -73,8 +73,8 @@ func setContractAddress(gethReceipt *types.Receipt) string { return gethReceipt.ContractAddress.Hex() } -func dereferenceLogs(gethReceipt *types.Receipt) []core.Log { - logs := []core.Log{} +func dereferenceLogs(gethReceipt *types.Receipt) []core.FullSyncLog { + logs := []core.FullSyncLog{} for _, log := range gethReceipt.Logs { logs = append(logs, ToCoreLog(*log)) } diff --git a/pkg/geth/converters/common/receipt_converter_test.go b/pkg/geth/converters/common/receipt_converter_test.go index bd34ca33..6788d80c 100644 --- 
a/pkg/geth/converters/common/receipt_converter_test.go +++ b/pkg/geth/converters/common/receipt_converter_test.go @@ -51,7 +51,7 @@ var _ = Describe("Conversion of GethReceipt to core.Receipt", func() { ContractAddress: "", CumulativeGasUsed: 25000, GasUsed: 21000, - Logs: []core.Log{}, + Logs: []core.FullSyncLog{}, StateRoot: "0x88abf7e73128227370aa7baa3dd4e18d0af70e92ef1f9ef426942fbe2dddb733", Status: -99, TxHash: receipt.TxHash.Hex(), @@ -92,7 +92,7 @@ var _ = Describe("Conversion of GethReceipt to core.Receipt", func() { ContractAddress: receipt.ContractAddress.Hex(), CumulativeGasUsed: 7996119, GasUsed: 21000, - Logs: []core.Log{}, + Logs: []core.FullSyncLog{}, StateRoot: "", Status: 1, TxHash: receipt.TxHash.Hex(), diff --git a/test_config/test_config.go b/test_config/test_config.go index 0d3ee032..f9fb3975 100644 --- a/test_config/test_config.go +++ b/test_config/test_config.go @@ -19,16 +19,14 @@ package test_config import ( "errors" "fmt" - "os" - . "github.com/onsi/gomega" - log "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus" "github.com/spf13/viper" - "github.com/vulcanize/vulcanizedb/pkg/config" "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories" + "os" ) var TestConfig *viper.Viper @@ -50,7 +48,7 @@ func setTestConfig() { TestConfig.AddConfigPath("$GOPATH/src/github.com/vulcanize/vulcanizedb/environments/") err := TestConfig.ReadInConfig() if err != nil { - log.Fatal(err) + logrus.Fatal(err) } ipc := TestConfig.GetString("client.ipcPath") hn := TestConfig.GetString("database.hostname") @@ -73,7 +71,7 @@ func setInfuraConfig() { Infura.AddConfigPath("$GOPATH/src/github.com/vulcanize/vulcanizedb/environments/") err := Infura.ReadInConfig() if err != nil { - log.Fatal(err) + logrus.Fatal(err) } ipc := Infura.GetString("client.ipcpath") @@ -83,7 +81,7 @@ func setInfuraConfig() { ipc = Infura.GetString("url") } if ipc == "" { 
- log.Fatal(errors.New("infura.toml IPC path or $INFURA_URL env variable need to be set")) + logrus.Fatal(errors.New("infura.toml IPC path or $INFURA_URL env variable need to be set")) } InfuraClient = config.Client{ From 63dabbb05190ec98391d78c0c9af6a09af8d62a5 Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Mon, 12 Aug 2019 12:26:49 -0500 Subject: [PATCH 05/21] Extract and delegate logs concurrently --- cmd/execute.go | 11 +- libraries/shared/logs/delegator.go | 19 +- libraries/shared/logs/delegator_test.go | 110 ++++-- libraries/shared/logs/extractor.go | 28 +- libraries/shared/logs/extractor_test.go | 333 +++++++++++------- libraries/shared/mocks/log_delegator.go | 18 +- libraries/shared/mocks/log_extractor.go | 20 +- libraries/shared/watcher/event_watcher.go | 71 +++- .../shared/watcher/event_watcher_test.go | 119 +++++-- .../checked_headers_repository.go | 6 +- .../checked_headers_repository_test.go | 25 -- 11 files changed, 506 insertions(+), 254 deletions(-) diff --git a/cmd/execute.go b/cmd/execute.go index c6d7bb15..108eba82 100644 --- a/cmd/execute.go +++ b/cmd/execute.go @@ -157,10 +157,13 @@ func watchEthEvents(w *watcher.EventWatcher, wg *syn.WaitGroup) { } else { recheck = constants.HeaderMissing } - ticker := time.NewTicker(pollingInterval) - defer ticker.Stop() - for range ticker.C { - w.Execute(recheck) + errs := make(chan error) + go w.Execute(recheck, errs) + for { + select { + case err := <-errs: + LogWithCommand.Fatalf("error executing event watcher: %s", err.Error()) + } } } diff --git a/libraries/shared/logs/delegator.go b/libraries/shared/logs/delegator.go index 67637a53..f04023f2 100644 --- a/libraries/shared/logs/delegator.go +++ b/libraries/shared/logs/delegator.go @@ -29,7 +29,7 @@ var ErrNoTransformers = errors.New("no event transformers configured in the log type ILogDelegator interface { AddTransformer(t transformer.EventTransformer) - DelegateLogs() error + DelegateLogs(errs chan error, logsFound chan bool) } type LogDelegator 
struct { @@ -43,24 +43,31 @@ func (delegator *LogDelegator) AddTransformer(t transformer.EventTransformer) { delegator.Chunker.AddConfig(t.GetConfig()) } -func (delegator LogDelegator) DelegateLogs() error { +func (delegator *LogDelegator) DelegateLogs(errs chan error, logsFound chan bool) { if len(delegator.Transformers) < 1 { - return ErrNoTransformers + errs <- ErrNoTransformers + return } persistedLogs, fetchErr := delegator.LogRepository.GetUntransformedHeaderSyncLogs() if fetchErr != nil { logrus.Errorf("error loading logs from db: %s", fetchErr.Error()) - return fetchErr + errs <- fetchErr + return + } + + if len(persistedLogs) < 1 { + logsFound <- false } transformErr := delegator.delegateLogs(persistedLogs) if transformErr != nil { logrus.Errorf("error transforming logs: %s", transformErr) - return transformErr + errs <- transformErr + return } - return nil + logsFound <- true } func (delegator *LogDelegator) delegateLogs(logs []core.HeaderSyncLog) error { diff --git a/libraries/shared/logs/delegator_test.go b/libraries/shared/logs/delegator_test.go index 15e59a9d..c1132bd5 100644 --- a/libraries/shared/logs/delegator_test.go +++ b/libraries/shared/logs/delegator_test.go @@ -59,75 +59,123 @@ var _ = Describe("Log delegator", func() { }) Describe("DelegateLogs", func() { - It("returns an error if no transformers configured", func() { - delegator := newDelegator(&fakes.MockHeaderSyncLogRepository{}) + var ( + errsChan chan error + logsFound chan bool + ) - err := delegator.DelegateLogs() - - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(logs.ErrNoTransformers)) + BeforeEach(func() { + errsChan = make(chan error) + logsFound = make(chan bool) }) - It("gets untransformed logs", func() { + It("returns an error if no transformers configured", func(done Done) { + delegator := newDelegator(&fakes.MockHeaderSyncLogRepository{}) + + go delegator.DelegateLogs(errsChan, logsFound) + + Expect(<-errsChan).To(MatchError(logs.ErrNoTransformers)) + close(done) 
+ }) + + It("gets untransformed logs", func(done Done) { mockLogRepository := &fakes.MockHeaderSyncLogRepository{} delegator := newDelegator(mockLogRepository) delegator.AddTransformer(&mocks.MockEventTransformer{}) - err := delegator.DelegateLogs() + go delegator.DelegateLogs(errsChan, logsFound) - Expect(err).NotTo(HaveOccurred()) - Expect(mockLogRepository.GetCalled).To(BeTrue()) + Eventually(func() bool { + return mockLogRepository.GetCalled + }).Should(BeTrue()) + close(done) }) - It("returns error if getting untransformed logs fails", func() { + It("emits error if getting untransformed logs fails", func(done Done) { mockLogRepository := &fakes.MockHeaderSyncLogRepository{} mockLogRepository.GetError = fakes.FakeError delegator := newDelegator(mockLogRepository) delegator.AddTransformer(&mocks.MockEventTransformer{}) - err := delegator.DelegateLogs() + go delegator.DelegateLogs(errsChan, logsFound) - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) + Expect(<-errsChan).To(MatchError(fakes.FakeError)) + close(done) }) - It("delegates chunked logs to transformers", func() { + It("emits that no logs were found if no logs returned", func(done Done) { + delegator := newDelegator(&fakes.MockHeaderSyncLogRepository{}) + delegator.AddTransformer(&mocks.MockEventTransformer{}) + + go delegator.DelegateLogs(errsChan, logsFound) + + Expect(<-logsFound).To(BeFalse()) + close(done) + }) + + It("delegates chunked logs to transformers", func(done Done) { fakeTransformer := &mocks.MockEventTransformer{} - fakeTransformer.SetTransformerConfig(mocks.FakeTransformerConfig) + config := mocks.FakeTransformerConfig + fakeTransformer.SetTransformerConfig(config) fakeGethLog := types.Log{ - Address: common.HexToAddress(fakeTransformer.GetConfig().ContractAddresses[0]), - Topics: []common.Hash{common.HexToHash(fakeTransformer.GetConfig().Topic)}, + Address: common.HexToAddress(config.ContractAddresses[0]), + Topics: 
[]common.Hash{common.HexToHash(config.Topic)}, } - fakeHeaderSyncLog := core.HeaderSyncLog{Log: fakeGethLog} - fakeHeaderSyncLogs := []core.HeaderSyncLog{fakeHeaderSyncLog} + fakeHeaderSyncLogs := []core.HeaderSyncLog{{Log: fakeGethLog}} mockLogRepository := &fakes.MockHeaderSyncLogRepository{} mockLogRepository.ReturnLogs = fakeHeaderSyncLogs delegator := newDelegator(mockLogRepository) delegator.AddTransformer(fakeTransformer) - err := delegator.DelegateLogs() + go delegator.DelegateLogs(errsChan, logsFound) - Expect(err).NotTo(HaveOccurred()) - Expect(fakeTransformer.ExecuteWasCalled).To(BeTrue()) - Expect(fakeTransformer.PassedLogs).To(Equal(fakeHeaderSyncLogs)) + Eventually(func() bool { + return fakeTransformer.ExecuteWasCalled + }).Should(BeTrue()) + Eventually(func() []core.HeaderSyncLog { + return fakeTransformer.PassedLogs + }).Should(Equal(fakeHeaderSyncLogs)) + close(done) }) - It("returns an error if transformer returns an error", func() { - delegator := newDelegator(&fakes.MockHeaderSyncLogRepository{}) + It("emits error if transformer returns an error", func(done Done) { + mockLogRepository := &fakes.MockHeaderSyncLogRepository{} + mockLogRepository.ReturnLogs = []core.HeaderSyncLog{{}} + delegator := newDelegator(mockLogRepository) fakeTransformer := &mocks.MockEventTransformer{ExecuteError: fakes.FakeError} delegator.AddTransformer(fakeTransformer) - err := delegator.DelegateLogs() + go delegator.DelegateLogs(errsChan, logsFound) - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) + Expect(<-errsChan).To(MatchError(fakes.FakeError)) + close(done) + }) + + It("emits logs found when logs returned and delegated", func(done Done) { + fakeTransformer := &mocks.MockEventTransformer{} + config := mocks.FakeTransformerConfig + fakeTransformer.SetTransformerConfig(config) + fakeGethLog := types.Log{ + Address: common.HexToAddress(config.ContractAddresses[0]), + Topics: []common.Hash{common.HexToHash(config.Topic)}, + } + 
fakeHeaderSyncLogs := []core.HeaderSyncLog{{Log: fakeGethLog}} + mockLogRepository := &fakes.MockHeaderSyncLogRepository{} + mockLogRepository.ReturnLogs = fakeHeaderSyncLogs + delegator := newDelegator(mockLogRepository) + delegator.AddTransformer(fakeTransformer) + + go delegator.DelegateLogs(errsChan, logsFound) + + Expect(<-logsFound).To(BeTrue()) + close(done) }) }) }) -func newDelegator(headerSyncLogRepository *fakes.MockHeaderSyncLogRepository) logs.LogDelegator { - return logs.LogDelegator{ +func newDelegator(headerSyncLogRepository *fakes.MockHeaderSyncLogRepository) *logs.LogDelegator { + return &logs.LogDelegator{ Chunker: chunker.NewLogChunker(), LogRepository: headerSyncLogRepository, } diff --git a/libraries/shared/logs/extractor.go b/libraries/shared/logs/extractor.go index fccce1b5..5c42dd8a 100644 --- a/libraries/shared/logs/extractor.go +++ b/libraries/shared/logs/extractor.go @@ -32,7 +32,7 @@ var ErrNoWatchedAddresses = errors.New("no watched addresses configured in the l type ILogExtractor interface { AddTransformerConfig(config transformer.EventTransformerConfig) - ExtractLogs(recheckHeaders constants.TransformerExecution) error + ExtractLogs(recheckHeaders constants.TransformerExecution, errs chan error, missingHeadersFound chan bool) } type LogExtractor struct { @@ -59,46 +59,56 @@ func (extractor *LogExtractor) AddTransformerConfig(config transformer.EventTran } // Fetch and persist watched logs -func (extractor LogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution) error { +func (extractor LogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution, errs chan error, missingHeadersFound chan bool) { if len(extractor.Addresses) < 1 { logrus.Errorf("error extracting logs: %s", ErrNoWatchedAddresses.Error()) - return ErrNoWatchedAddresses + errs <- ErrNoWatchedAddresses + return } missingHeaders, missingHeadersErr := extractor.CheckedHeadersRepository.MissingHeaders(*extractor.StartingBlock, -1, 
getCheckCount(recheckHeaders)) if missingHeadersErr != nil { logrus.Errorf("error fetching missing headers: %s", missingHeadersErr) - return missingHeadersErr + errs <- missingHeadersErr + return + } + + if len(missingHeaders) < 1 { + missingHeadersFound <- false + return } for _, header := range missingHeaders { logs, fetchLogsErr := extractor.Fetcher.FetchLogs(extractor.Addresses, extractor.Topics, header) if fetchLogsErr != nil { logError("error fetching logs for header: %s", fetchLogsErr, header) - return fetchLogsErr + errs <- fetchLogsErr + return } if len(logs) > 0 { transactionsSyncErr := extractor.Syncer.SyncTransactions(header.Id, logs) if transactionsSyncErr != nil { logError("error syncing transactions: %s", transactionsSyncErr, header) - return transactionsSyncErr + errs <- transactionsSyncErr + return } createLogsErr := extractor.LogRepository.CreateHeaderSyncLogs(header.Id, logs) if createLogsErr != nil { logError("error persisting logs: %s", createLogsErr, header) - return createLogsErr + errs <- createLogsErr + return } } markHeaderCheckedErr := extractor.CheckedHeadersRepository.MarkHeaderChecked(header.Id) if markHeaderCheckedErr != nil { logError("error marking header checked: %s", markHeaderCheckedErr, header) - return markHeaderCheckedErr + errs <- markHeaderCheckedErr } } - return nil + missingHeadersFound <- true } func earlierStartingBlockNumber(transformerBlock, watcherBlock int64) bool { diff --git a/libraries/shared/logs/extractor_test.go b/libraries/shared/logs/extractor_test.go index 151086be..539c4c29 100644 --- a/libraries/shared/logs/extractor_test.go +++ b/libraries/shared/logs/extractor_test.go @@ -81,201 +81,270 @@ var _ = Describe("Log extractor", func() { }) Describe("ExtractLogs", func() { - It("returns error if no watched addresses configured", func() { - err := extractor.ExtractLogs(constants.HeaderMissing) + var ( + errsChan chan error + missingHeadersFound chan bool + ) - Expect(err).To(HaveOccurred()) - 
Expect(err).To(MatchError(logs.ErrNoWatchedAddresses)) + BeforeEach(func() { + errsChan = make(chan error) + missingHeadersFound = make(chan bool) + }) + + It("returns error if no watched addresses configured", func(done Done) { + go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) + + Expect(<-errsChan).To(MatchError(logs.ErrNoWatchedAddresses)) + close(done) }) Describe("when checking missing headers", func() { - It("gets missing headers since configured starting block with check count < 1", func() { + It("gets missing headers since configured starting block with check_count < 1", func(done Done) { mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{}} extractor.CheckedHeadersRepository = mockCheckedHeadersRepository startingBlockNumber := rand.Int63() extractor.AddTransformerConfig(getTransformerConfig(startingBlockNumber)) - err := extractor.ExtractLogs(constants.HeaderMissing) + go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) - Expect(err).NotTo(HaveOccurred()) - Expect(mockCheckedHeadersRepository.StartingBlockNumber).To(Equal(startingBlockNumber)) - Expect(mockCheckedHeadersRepository.EndingBlockNumber).To(Equal(int64(-1))) - Expect(mockCheckedHeadersRepository.CheckCount).To(Equal(int64(1))) + Eventually(func() int64 { + return mockCheckedHeadersRepository.StartingBlockNumber + }).Should(Equal(startingBlockNumber)) + Eventually(func() int64 { + return mockCheckedHeadersRepository.EndingBlockNumber + }).Should(Equal(int64(-1))) + Eventually(func() int64 { + return mockCheckedHeadersRepository.CheckCount + }).Should(Equal(int64(1))) + close(done) }) }) Describe("when rechecking headers", func() { - It("gets missing headers since configured starting block with check count < 1", func() { + It("gets missing headers since configured starting block with check_count < RecheckHeaderCap", func(done Done) { 
mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{}} extractor.CheckedHeadersRepository = mockCheckedHeadersRepository startingBlockNumber := rand.Int63() extractor.AddTransformerConfig(getTransformerConfig(startingBlockNumber)) - err := extractor.ExtractLogs(constants.HeaderRecheck) + go extractor.ExtractLogs(constants.HeaderRecheck, errsChan, missingHeadersFound) - Expect(err).NotTo(HaveOccurred()) - Expect(mockCheckedHeadersRepository.StartingBlockNumber).To(Equal(startingBlockNumber)) - Expect(mockCheckedHeadersRepository.EndingBlockNumber).To(Equal(int64(-1))) - Expect(mockCheckedHeadersRepository.CheckCount).To(Equal(constants.RecheckHeaderCap)) + Eventually(func() int64 { + return mockCheckedHeadersRepository.StartingBlockNumber + }).Should(Equal(startingBlockNumber)) + Eventually(func() int64 { + return mockCheckedHeadersRepository.EndingBlockNumber + }).Should(Equal(int64(-1))) + Eventually(func() int64 { + return mockCheckedHeadersRepository.CheckCount + }).Should(Equal(constants.RecheckHeaderCap)) + close(done) }) }) - It("returns error if getting missing headers fails", func() { + It("emits error if getting missing headers fails", func(done Done) { addTransformerConfig(extractor) mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} mockCheckedHeadersRepository.MissingHeadersReturnError = fakes.FakeError extractor.CheckedHeadersRepository = mockCheckedHeadersRepository - err := extractor.ExtractLogs(constants.HeaderMissing) + go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) + Expect(<-errsChan).To(MatchError(fakes.FakeError)) + close(done) }) - It("does not fetch logs if no missing headers", func() { - addTransformerConfig(extractor) - mockLogFetcher := &mocks.MockLogFetcher{} - extractor.Fetcher = mockLogFetcher + Describe("when no missing headers", 
func() { + It("does not fetch logs", func(done Done) { + addTransformerConfig(extractor) + mockLogFetcher := &mocks.MockLogFetcher{} + extractor.Fetcher = mockLogFetcher - err := extractor.ExtractLogs(constants.HeaderMissing) + go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) - Expect(err).NotTo(HaveOccurred()) - Expect(mockLogFetcher.FetchCalled).To(BeFalse()) + Consistently(func() bool { + return mockLogFetcher.FetchCalled + }).Should(BeFalse()) + close(done) + }) + + It("emits that no missing headers were found", func(done Done) { + addTransformerConfig(extractor) + mockLogFetcher := &mocks.MockLogFetcher{} + extractor.Fetcher = mockLogFetcher + + go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) + + Expect(<-missingHeadersFound).To(BeFalse()) + close(done) + }) }) - It("fetches logs for missing headers", func() { - addMissingHeader(extractor) - config := transformer.EventTransformerConfig{ - ContractAddresses: []string{fakes.FakeAddress.Hex()}, - Topic: fakes.FakeHash.Hex(), - StartingBlockNumber: rand.Int63(), - } - extractor.AddTransformerConfig(config) - mockLogFetcher := &mocks.MockLogFetcher{} - extractor.Fetcher = mockLogFetcher + Describe("when there are missing headers", func() { + It("fetches logs for missing headers", func(done Done) { + addMissingHeader(extractor) + config := transformer.EventTransformerConfig{ + ContractAddresses: []string{fakes.FakeAddress.Hex()}, + Topic: fakes.FakeHash.Hex(), + StartingBlockNumber: rand.Int63(), + } + extractor.AddTransformerConfig(config) + mockLogFetcher := &mocks.MockLogFetcher{} + extractor.Fetcher = mockLogFetcher - err := extractor.ExtractLogs(constants.HeaderMissing) + go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) - Expect(err).NotTo(HaveOccurred()) - Expect(mockLogFetcher.FetchCalled).To(BeTrue()) - Expect(mockLogFetcher.Topics).To(Equal([]common.Hash{common.HexToHash(config.Topic)})) - 
Expect(mockLogFetcher.ContractAddresses).To(Equal(transformer.HexStringsToAddresses(config.ContractAddresses))) - }) + Eventually(func() bool { + return mockLogFetcher.FetchCalled + }).Should(BeTrue()) + expectedTopics := []common.Hash{common.HexToHash(config.Topic)} + Eventually(func() []common.Hash { + return mockLogFetcher.Topics + }).Should(Equal(expectedTopics)) + expectedAddresses := transformer.HexStringsToAddresses(config.ContractAddresses) + Eventually(func() []common.Address { + return mockLogFetcher.ContractAddresses + }).Should(Equal(expectedAddresses)) + close(done) + }) - It("returns error if fetching logs fails", func() { - addMissingHeader(extractor) - addTransformerConfig(extractor) - mockLogFetcher := &mocks.MockLogFetcher{} - mockLogFetcher.ReturnError = fakes.FakeError - extractor.Fetcher = mockLogFetcher + It("returns error if fetching logs fails", func(done Done) { + addMissingHeader(extractor) + addTransformerConfig(extractor) + mockLogFetcher := &mocks.MockLogFetcher{} + mockLogFetcher.ReturnError = fakes.FakeError + extractor.Fetcher = mockLogFetcher - err := extractor.ExtractLogs(constants.HeaderMissing) + go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) + Expect(<-errsChan).To(MatchError(fakes.FakeError)) + close(done) + }) - It("does not sync transactions if no fetched logs", func() { - addMissingHeader(extractor) - addTransformerConfig(extractor) - mockTransactionSyncer := &fakes.MockTransactionSyncer{} - extractor.Syncer = mockTransactionSyncer + Describe("when no fetched logs", func() { + It("does not sync transactions", func(done Done) { + addMissingHeader(extractor) + addTransformerConfig(extractor) + mockTransactionSyncer := &fakes.MockTransactionSyncer{} + extractor.Syncer = mockTransactionSyncer - err := extractor.ExtractLogs(constants.HeaderMissing) + go extractor.ExtractLogs(constants.HeaderMissing, errsChan, 
missingHeadersFound) - Expect(err).NotTo(HaveOccurred()) - Expect(mockTransactionSyncer.SyncTransactionsCalled).To(BeFalse()) - }) + Consistently(func() bool { + return mockTransactionSyncer.SyncTransactionsCalled + }).Should(BeFalse()) + close(done) + }) + }) - It("syncs transactions for fetched logs", func() { - addMissingHeader(extractor) - addFetchedLog(extractor) - addTransformerConfig(extractor) - mockTransactionSyncer := &fakes.MockTransactionSyncer{} - extractor.Syncer = mockTransactionSyncer + Describe("when there are fetched logs", func() { + It("syncs transactions", func(done Done) { + addMissingHeader(extractor) + addFetchedLog(extractor) + addTransformerConfig(extractor) + mockTransactionSyncer := &fakes.MockTransactionSyncer{} + extractor.Syncer = mockTransactionSyncer - err := extractor.ExtractLogs(constants.HeaderMissing) + go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) - Expect(err).NotTo(HaveOccurred()) - Expect(mockTransactionSyncer.SyncTransactionsCalled).To(BeTrue()) - }) + Eventually(func() bool { + return mockTransactionSyncer.SyncTransactionsCalled + }).Should(BeTrue()) + close(done) + }) - It("returns error if syncing transactions fails", func() { - addMissingHeader(extractor) - addFetchedLog(extractor) - addTransformerConfig(extractor) - mockTransactionSyncer := &fakes.MockTransactionSyncer{} - mockTransactionSyncer.SyncTransactionsError = fakes.FakeError - extractor.Syncer = mockTransactionSyncer + It("returns error if syncing transactions fails", func(done Done) { + addMissingHeader(extractor) + addFetchedLog(extractor) + addTransformerConfig(extractor) + mockTransactionSyncer := &fakes.MockTransactionSyncer{} + mockTransactionSyncer.SyncTransactionsError = fakes.FakeError + extractor.Syncer = mockTransactionSyncer - err := extractor.ExtractLogs(constants.HeaderMissing) + go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) - Expect(err).To(HaveOccurred()) - 
Expect(err).To(MatchError(fakes.FakeError)) - }) + Expect(<-errsChan).To(MatchError(fakes.FakeError)) + close(done) + }) - It("persists fetched logs", func() { - addMissingHeader(extractor) - addTransformerConfig(extractor) - fakeLogs := []types.Log{{ - Address: common.HexToAddress("0xA"), - Topics: []common.Hash{common.HexToHash("0xA")}, - Data: []byte{}, - Index: 0, - }} - mockLogFetcher := &mocks.MockLogFetcher{ReturnLogs: fakeLogs} - extractor.Fetcher = mockLogFetcher - mockLogRepository := &fakes.MockHeaderSyncLogRepository{} - extractor.LogRepository = mockLogRepository + It("persists fetched logs", func(done Done) { + addMissingHeader(extractor) + addTransformerConfig(extractor) + fakeLogs := []types.Log{{ + Address: common.HexToAddress("0xA"), + Topics: []common.Hash{common.HexToHash("0xA")}, + Data: []byte{}, + Index: 0, + }} + mockLogFetcher := &mocks.MockLogFetcher{ReturnLogs: fakeLogs} + extractor.Fetcher = mockLogFetcher + mockLogRepository := &fakes.MockHeaderSyncLogRepository{} + extractor.LogRepository = mockLogRepository - err := extractor.ExtractLogs(constants.HeaderMissing) + go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) - Expect(err).NotTo(HaveOccurred()) - Expect(mockLogRepository.PassedLogs).To(Equal(fakeLogs)) - }) + Eventually(func() []types.Log { + return mockLogRepository.PassedLogs + }).Should(Equal(fakeLogs)) + close(done) + }) - It("returns error if persisting logs fails", func() { - addMissingHeader(extractor) - addFetchedLog(extractor) - addTransformerConfig(extractor) - mockLogRepository := &fakes.MockHeaderSyncLogRepository{} - mockLogRepository.CreateError = fakes.FakeError - extractor.LogRepository = mockLogRepository + It("returns error if persisting logs fails", func(done Done) { + addMissingHeader(extractor) + addFetchedLog(extractor) + addTransformerConfig(extractor) + mockLogRepository := &fakes.MockHeaderSyncLogRepository{} + mockLogRepository.CreateError = fakes.FakeError + 
extractor.LogRepository = mockLogRepository - err := extractor.ExtractLogs(constants.HeaderMissing) + go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) - }) + Expect(<-errsChan).To(MatchError(fakes.FakeError)) + close(done) + }) + }) - It("marks header checked", func() { - addFetchedLog(extractor) - addTransformerConfig(extractor) - mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} - headerID := rand.Int63() - mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{Id: headerID}} - extractor.CheckedHeadersRepository = mockCheckedHeadersRepository + It("marks header checked", func(done Done) { + addFetchedLog(extractor) + addTransformerConfig(extractor) + mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} + headerID := rand.Int63() + mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{Id: headerID}} + extractor.CheckedHeadersRepository = mockCheckedHeadersRepository - err := extractor.ExtractLogs(constants.HeaderMissing) + go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) - Expect(err).NotTo(HaveOccurred()) - Expect(mockCheckedHeadersRepository.HeaderID).To(Equal(headerID)) - }) + Eventually(func() int64 { + return mockCheckedHeadersRepository.HeaderID + }).Should(Equal(headerID)) + close(done) + }) - It("returns error if marking header checked fails", func() { - addFetchedLog(extractor) - addTransformerConfig(extractor) - mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} - mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{Id: rand.Int63()}} - mockCheckedHeadersRepository.MarkHeaderCheckedReturnError = fakes.FakeError - extractor.CheckedHeadersRepository = mockCheckedHeadersRepository + It("returns error if marking header checked fails", func(done Done) { + addFetchedLog(extractor) + addTransformerConfig(extractor) + mockCheckedHeadersRepository := 
&fakes.MockCheckedHeadersRepository{} + mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{Id: rand.Int63()}} + mockCheckedHeadersRepository.MarkHeaderCheckedReturnError = fakes.FakeError + extractor.CheckedHeadersRepository = mockCheckedHeadersRepository - err := extractor.ExtractLogs(constants.HeaderMissing) + go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) + Expect(<-errsChan).To(MatchError(fakes.FakeError)) + close(done) + }) + + It("emits that missing headers were found", func(done Done) { + addMissingHeader(extractor) + addTransformerConfig(extractor) + + go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) + + Expect(<-missingHeadersFound).To(BeTrue()) + close(done) + }) }) }) }) diff --git a/libraries/shared/mocks/log_delegator.go b/libraries/shared/mocks/log_delegator.go index 95ad5fe8..8a2338a4 100644 --- a/libraries/shared/mocks/log_delegator.go +++ b/libraries/shared/mocks/log_delegator.go @@ -22,15 +22,23 @@ import ( type MockLogDelegator struct { AddedTransformers []transformer.EventTransformer - DelegateCalled bool - DelegateError error + DelegateCallCount int + DelegateErrors []error + LogsFound []bool } func (delegator *MockLogDelegator) AddTransformer(t transformer.EventTransformer) { delegator.AddedTransformers = append(delegator.AddedTransformers, t) } -func (delegator *MockLogDelegator) DelegateLogs() error { - delegator.DelegateCalled = true - return delegator.DelegateError +func (delegator *MockLogDelegator) DelegateLogs(errs chan error, logsFound chan bool) { + delegator.DelegateCallCount++ + var delegateErrorThisRun error + delegateErrorThisRun, delegator.DelegateErrors = delegator.DelegateErrors[0], delegator.DelegateErrors[1:] + if delegateErrorThisRun != nil { + errs <- delegateErrorThisRun + } + var logsFoundThisRun bool + logsFoundThisRun, delegator.LogsFound = delegator.LogsFound[0], 
delegator.LogsFound[1:] + logsFound <- logsFoundThisRun } diff --git a/libraries/shared/mocks/log_extractor.go b/libraries/shared/mocks/log_extractor.go index b179b1ea..0fba9d27 100644 --- a/libraries/shared/mocks/log_extractor.go +++ b/libraries/shared/mocks/log_extractor.go @@ -22,16 +22,24 @@ import ( ) type MockLogExtractor struct { - AddedConfigs []transformer.EventTransformerConfig - ExtractLogsCalled bool - ExtractLogsError error + AddedConfigs []transformer.EventTransformerConfig + ExtractLogsCount int + ExtractLogsErrors []error + MissingHeadersExist []bool } func (extractor *MockLogExtractor) AddTransformerConfig(config transformer.EventTransformerConfig) { extractor.AddedConfigs = append(extractor.AddedConfigs, config) } -func (extractor *MockLogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution) error { - extractor.ExtractLogsCalled = true - return extractor.ExtractLogsError +func (extractor *MockLogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution, errs chan error, missingHeadersFound chan bool) { + extractor.ExtractLogsCount++ + var errorThisRun error + errorThisRun, extractor.ExtractLogsErrors = extractor.ExtractLogsErrors[0], extractor.ExtractLogsErrors[1:] + if errorThisRun != nil { + errs <- errorThisRun + } + var missingHeadersExist bool + missingHeadersExist, extractor.MissingHeadersExist = extractor.MissingHeadersExist[0], extractor.MissingHeadersExist[1:] + missingHeadersFound <- missingHeadersExist } diff --git a/libraries/shared/watcher/event_watcher.go b/libraries/shared/watcher/event_watcher.go index c682400c..ff40e302 100644 --- a/libraries/shared/watcher/event_watcher.go +++ b/libraries/shared/watcher/event_watcher.go @@ -27,8 +27,11 @@ import ( "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories" + "time" ) +const NoNewDataPause = time.Second * 7 + type EventWatcher struct { 
blockChain core.BlockChain db *postgres.DB @@ -66,18 +69,62 @@ func (watcher *EventWatcher) AddTransformers(initializers []transformer.EventTra } // Extracts and delegates watched log events. -func (watcher *EventWatcher) Execute(recheckHeaders constants.TransformerExecution) error { - extractErr := watcher.LogExtractor.ExtractLogs(recheckHeaders) - if extractErr != nil { - logrus.Errorf("error extracting logs in event watcher: %s", extractErr.Error()) - return extractErr - } +func (watcher *EventWatcher) Execute(recheckHeaders constants.TransformerExecution, errsChan chan error) { + extractErrsChan := make(chan error) + delegateErrsChan := make(chan error) - delegateErr := watcher.LogDelegator.DelegateLogs() - if delegateErr != nil { - logrus.Errorf("error delegating logs in event watcher: %s", delegateErr.Error()) - return delegateErr - } + go watcher.extractLogs(recheckHeaders, extractErrsChan) + go watcher.delegateLogs(delegateErrsChan) + + for { + select { + case extractErr := <-extractErrsChan: + logrus.Errorf("error extracting logs in event watcher: %s", extractErr.Error()) + errsChan <- extractErr + case delegateErr := <-delegateErrsChan: + logrus.Errorf("error delegating logs in event watcher: %s", delegateErr.Error()) + errsChan <- delegateErr + } + } +} + +func (watcher *EventWatcher) extractLogs(recheckHeaders constants.TransformerExecution, errs chan error) { + extractLogsErr := make(chan error) + missingHeadersFound := make(chan bool) + go watcher.LogExtractor.ExtractLogs(recheckHeaders, extractLogsErr, missingHeadersFound) + + for { + select { + case err := <-extractLogsErr: + errs <- err + case missingHeaders := <-missingHeadersFound: + if missingHeaders { + go watcher.extractLogs(recheckHeaders, errs) + } else { + time.Sleep(NoNewDataPause) + go watcher.extractLogs(recheckHeaders, errs) + } + } + } +} + +func (watcher *EventWatcher) delegateLogs(errs chan error) { + delegateLogsErr := make(chan error) + logsFound := make(chan bool) + go 
watcher.LogDelegator.DelegateLogs(delegateLogsErr, logsFound) + + for { + select { + case err := <-delegateLogsErr: + errs <- err + case logs := <-logsFound: + if logs { + go watcher.delegateLogs(errs) + } else { + time.Sleep(NoNewDataPause) + go watcher.delegateLogs(errs) + } + } + } - return nil } diff --git a/libraries/shared/watcher/event_watcher_test.go b/libraries/shared/watcher/event_watcher_test.go index 041aca42..be165396 100644 --- a/libraries/shared/watcher/event_watcher_test.go +++ b/libraries/shared/watcher/event_watcher_test.go @@ -30,13 +30,13 @@ var _ = Describe("Event Watcher", func() { var ( delegator *mocks.MockLogDelegator extractor *mocks.MockLogExtractor - eventWatcher watcher.EventWatcher + eventWatcher *watcher.EventWatcher ) BeforeEach(func() { delegator = &mocks.MockLogDelegator{} extractor = &mocks.MockLogExtractor{} - eventWatcher = watcher.EventWatcher{ + eventWatcher = &watcher.EventWatcher{ LogDelegator: delegator, LogExtractor: extractor, } @@ -78,36 +78,115 @@ var _ = Describe("Event Watcher", func() { }) Describe("Execute", func() { - It("extracts watched logs", func() { - err := eventWatcher.Execute(constants.HeaderMissing) + var errsChan chan error - Expect(err).NotTo(HaveOccurred()) - Expect(extractor.ExtractLogsCalled).To(BeTrue()) + BeforeEach(func() { + errsChan = make(chan error) }) - It("returns error if extracting logs fails", func() { - extractor.ExtractLogsError = fakes.FakeError + It("extracts watched logs", func(done Done) { + delegator.DelegateErrors = []error{nil} + delegator.LogsFound = []bool{false} + extractor.ExtractLogsErrors = []error{nil} + extractor.MissingHeadersExist = []bool{false} - err := eventWatcher.Execute(constants.HeaderMissing) + go eventWatcher.Execute(constants.HeaderMissing, errsChan) - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) + Eventually(func() int { + return extractor.ExtractLogsCount + }).Should(Equal(1)) + close(done) }) - It("delegates untransformed 
logs", func() { - err := eventWatcher.Execute(constants.HeaderMissing) + It("returns error if extracting logs fails", func(done Done) { + delegator.DelegateErrors = []error{nil} + delegator.LogsFound = []bool{false} + extractor.ExtractLogsErrors = []error{fakes.FakeError} + extractor.MissingHeadersExist = []bool{false} - Expect(err).NotTo(HaveOccurred()) - Expect(delegator.DelegateCalled).To(BeTrue()) + go eventWatcher.Execute(constants.HeaderMissing, errsChan) + + Expect(<-errsChan).To(MatchError(fakes.FakeError)) + close(done) }) - It("returns error if delegating logs fails", func() { - delegator.DelegateError = fakes.FakeError + It("extracts watched logs again if missing headers found", func(done Done) { + delegator.DelegateErrors = []error{nil} + delegator.LogsFound = []bool{false} + extractor.ExtractLogsErrors = []error{nil, nil} + extractor.MissingHeadersExist = []bool{true, false} - err := eventWatcher.Execute(constants.HeaderMissing) + go eventWatcher.Execute(constants.HeaderMissing, errsChan) - Expect(err).To(HaveOccurred()) - Expect(err).To(MatchError(fakes.FakeError)) + Eventually(func() int { + return extractor.ExtractLogsCount + }).Should(Equal(2)) + close(done) + }) + + It("returns error if extracting logs fails on subsequent run", func(done Done) { + delegator.DelegateErrors = []error{nil} + delegator.LogsFound = []bool{false} + extractor.ExtractLogsErrors = []error{nil, fakes.FakeError} + extractor.MissingHeadersExist = []bool{true, false} + + go eventWatcher.Execute(constants.HeaderMissing, errsChan) + + Expect(<-errsChan).To(MatchError(fakes.FakeError)) + close(done) + + }) + + It("delegates untransformed logs", func(done Done) { + delegator.DelegateErrors = []error{nil} + delegator.LogsFound = []bool{false} + extractor.ExtractLogsErrors = []error{nil} + extractor.MissingHeadersExist = []bool{false} + + go eventWatcher.Execute(constants.HeaderMissing, errsChan) + + Eventually(func() int { + return delegator.DelegateCallCount + }).Should(Equal(1)) 
+ close(done) + }) + + It("returns error if delegating logs fails", func(done Done) { + delegator.LogsFound = []bool{false} + delegator.DelegateErrors = []error{fakes.FakeError} + extractor.ExtractLogsErrors = []error{nil} + extractor.MissingHeadersExist = []bool{false} + + go eventWatcher.Execute(constants.HeaderMissing, errsChan) + + Expect(<-errsChan).To(MatchError(fakes.FakeError)) + close(done) + }) + + It("delegates logs again if untransformed logs found", func(done Done) { + delegator.DelegateErrors = []error{nil, nil} + delegator.LogsFound = []bool{true, false} + extractor.ExtractLogsErrors = []error{nil} + extractor.MissingHeadersExist = []bool{false} + + go eventWatcher.Execute(constants.HeaderMissing, errsChan) + + Eventually(func() int { + return delegator.DelegateCallCount + }).Should(Equal(2)) + close(done) + }) + + It("returns error if delegating logs fails on subsequent run", func(done Done) { + delegator.DelegateErrors = []error{nil, fakes.FakeError} + delegator.LogsFound = []bool{true, false} + extractor.ExtractLogsErrors = []error{nil} + extractor.MissingHeadersExist = []bool{false} + + go eventWatcher.Execute(constants.HeaderMissing, errsChan) + + Expect(<-errsChan).To(MatchError(fakes.FakeError)) + close(done) }) }) }) diff --git a/pkg/datastore/postgres/repositories/checked_headers_repository.go b/pkg/datastore/postgres/repositories/checked_headers_repository.go index 272a8dae..341cb272 100644 --- a/pkg/datastore/postgres/repositories/checked_headers_repository.go +++ b/pkg/datastore/postgres/repositories/checked_headers_repository.go @@ -52,8 +52,7 @@ func (repo CheckedHeadersRepository) MissingHeaders(startingBlockNumber, endingB LEFT JOIN checked_headers on headers.id = header_id WHERE (header_id ISNULL OR check_count < $2) AND headers.block_number >= $1 - AND headers.eth_node_fingerprint = $3 - LIMIT 100` + AND headers.eth_node_fingerprint = $3` err = repo.db.Select(&result, query, startingBlockNumber, checkCount, repo.db.Node.ID) } else { 
query = `SELECT headers.id, headers.block_number, headers.hash FROM headers @@ -61,8 +60,7 @@ func (repo CheckedHeadersRepository) MissingHeaders(startingBlockNumber, endingB WHERE (header_id ISNULL OR check_count < $3) AND headers.block_number >= $1 AND headers.block_number <= $2 - AND headers.eth_node_fingerprint = $4 - LIMIT 100` + AND headers.eth_node_fingerprint = $4` err = repo.db.Select(&result, query, startingBlockNumber, endingBlockNumber, checkCount, repo.db.Node.ID) } diff --git a/pkg/datastore/postgres/repositories/checked_headers_repository_test.go b/pkg/datastore/postgres/repositories/checked_headers_repository_test.go index 422d00b3..c9a9284a 100644 --- a/pkg/datastore/postgres/repositories/checked_headers_repository_test.go +++ b/pkg/datastore/postgres/repositories/checked_headers_repository_test.go @@ -170,18 +170,6 @@ var _ = Describe("Checked Headers repository", func() { Expect(nodeTwoMissingHeaders[1].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10))) Expect(nodeTwoMissingHeaders[2].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10))) }) - - It("only returns 100 results to prevent blocking log delegation", func() { - for n := outOfRangeBlockNumber + 1; n < outOfRangeBlockNumber+100; n++ { - _, err := headerRepository.CreateOrUpdateHeader(fakes.GetFakeHeader(n)) - Expect(err).NotTo(HaveOccurred()) - } - - missingHeaders, err := repo.MissingHeaders(startingBlockNumber, endingBlockNumber+200, uncheckedCheckCount) - - Expect(err).NotTo(HaveOccurred()) - Expect(len(missingHeaders)).To(Equal(100)) - }) }) Describe("when ending block is -1", func() { @@ -252,19 +240,6 @@ var _ = Describe("Checked Headers repository", func() { Expect(nodeTwoMissingHeaders[2].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10), Equal(outOfRangeBlockNumber+10))) 
Expect(nodeTwoMissingHeaders[3].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10), Equal(outOfRangeBlockNumber+10))) }) - - It("only returns 100 results to prevent blocking log delegation", func() { - for n := outOfRangeBlockNumber + 1; n < outOfRangeBlockNumber+100; n++ { - _, err := headerRepository.CreateOrUpdateHeader(fakes.GetFakeHeader(n)) - Expect(err).NotTo(HaveOccurred()) - } - - missingHeaders, err := repo.MissingHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) - - Expect(err).NotTo(HaveOccurred()) - Expect(len(missingHeaders)).To(Equal(100)) - }) }) - }) }) From 1883a11ab1fe510aaa6125e85d77b0da3a4e7992 Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Wed, 14 Aug 2019 12:19:51 -0500 Subject: [PATCH 06/21] Update comment in log chunker MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-Authored-By: Edvard Hübinette --- libraries/shared/chunker/log_chunker.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libraries/shared/chunker/log_chunker.go b/libraries/shared/chunker/log_chunker.go index 6adceb42..38edd5af 100644 --- a/libraries/shared/chunker/log_chunker.go +++ b/libraries/shared/chunker/log_chunker.go @@ -51,7 +51,7 @@ func (chunker *LogChunker) AddConfig(transformerConfig transformer.EventTransfor } } -// Goes through an array of logs, associating relevant logs (matching addresses and topic) with transformers +// Goes through a slice of logs, associating relevant logs (matching addresses and topic) with transformers func (chunker *LogChunker) ChunkLogs(logs []core.HeaderSyncLog) map[string][]core.HeaderSyncLog { chunks := map[string][]core.HeaderSyncLog{} for _, log := range logs { From d76be4962bab0cb52d8489f21a5a6bc95a9d0b03 Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Wed, 14 Aug 2019 17:29:04 -0500 Subject: [PATCH 07/21] Remove unnecessary async from the event watcher - extract and delegate logs synchronously 
after initial goroutine fired --- libraries/shared/logs/delegator.go | 22 +-- libraries/shared/logs/delegator_test.go | 75 ++++----- libraries/shared/logs/extractor.go | 31 ++-- libraries/shared/logs/extractor_test.go | 177 +++++++++------------- libraries/shared/mocks/log_delegator.go | 6 +- libraries/shared/mocks/log_extractor.go | 6 +- libraries/shared/watcher/event_watcher.go | 49 +++--- 7 files changed, 149 insertions(+), 217 deletions(-) diff --git a/libraries/shared/logs/delegator.go b/libraries/shared/logs/delegator.go index f04023f2..7d2b2a5c 100644 --- a/libraries/shared/logs/delegator.go +++ b/libraries/shared/logs/delegator.go @@ -27,9 +27,14 @@ import ( var ErrNoTransformers = errors.New("no event transformers configured in the log delegator") +const ( + logsFound = true + noLogsFound = false +) + type ILogDelegator interface { AddTransformer(t transformer.EventTransformer) - DelegateLogs(errs chan error, logsFound chan bool) + DelegateLogs() (error, bool) } type LogDelegator struct { @@ -43,31 +48,28 @@ func (delegator *LogDelegator) AddTransformer(t transformer.EventTransformer) { delegator.Chunker.AddConfig(t.GetConfig()) } -func (delegator *LogDelegator) DelegateLogs(errs chan error, logsFound chan bool) { +func (delegator *LogDelegator) DelegateLogs() (error, bool) { if len(delegator.Transformers) < 1 { - errs <- ErrNoTransformers - return + return ErrNoTransformers, noLogsFound } persistedLogs, fetchErr := delegator.LogRepository.GetUntransformedHeaderSyncLogs() if fetchErr != nil { logrus.Errorf("error loading logs from db: %s", fetchErr.Error()) - errs <- fetchErr - return + return fetchErr, noLogsFound } if len(persistedLogs) < 1 { - logsFound <- false + return nil, noLogsFound } transformErr := delegator.delegateLogs(persistedLogs) if transformErr != nil { logrus.Errorf("error transforming logs: %s", transformErr) - errs <- transformErr - return + return transformErr, logsFound } - logsFound <- true + return nil, logsFound } func (delegator 
*LogDelegator) delegateLogs(logs []core.HeaderSyncLog) error { diff --git a/libraries/shared/logs/delegator_test.go b/libraries/shared/logs/delegator_test.go index c1132bd5..f2bd581c 100644 --- a/libraries/shared/logs/delegator_test.go +++ b/libraries/shared/logs/delegator_test.go @@ -59,61 +59,49 @@ var _ = Describe("Log delegator", func() { }) Describe("DelegateLogs", func() { - var ( - errsChan chan error - logsFound chan bool - ) - - BeforeEach(func() { - errsChan = make(chan error) - logsFound = make(chan bool) - }) - - It("returns an error if no transformers configured", func(done Done) { + It("returns an error if no transformers configured", func() { delegator := newDelegator(&fakes.MockHeaderSyncLogRepository{}) - go delegator.DelegateLogs(errsChan, logsFound) + err, _ := delegator.DelegateLogs() - Expect(<-errsChan).To(MatchError(logs.ErrNoTransformers)) - close(done) + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(logs.ErrNoTransformers)) }) - It("gets untransformed logs", func(done Done) { + It("gets untransformed logs", func() { mockLogRepository := &fakes.MockHeaderSyncLogRepository{} delegator := newDelegator(mockLogRepository) delegator.AddTransformer(&mocks.MockEventTransformer{}) - go delegator.DelegateLogs(errsChan, logsFound) + err, _ := delegator.DelegateLogs() - Eventually(func() bool { - return mockLogRepository.GetCalled - }).Should(BeTrue()) - close(done) + Expect(err).NotTo(HaveOccurred()) + Expect(mockLogRepository.GetCalled).To(BeTrue()) }) - It("emits error if getting untransformed logs fails", func(done Done) { + It("emits error if getting untransformed logs fails", func() { mockLogRepository := &fakes.MockHeaderSyncLogRepository{} mockLogRepository.GetError = fakes.FakeError delegator := newDelegator(mockLogRepository) delegator.AddTransformer(&mocks.MockEventTransformer{}) - go delegator.DelegateLogs(errsChan, logsFound) + err, _ := delegator.DelegateLogs() - Expect(<-errsChan).To(MatchError(fakes.FakeError)) - 
close(done) + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(fakes.FakeError)) }) - It("emits that no logs were found if no logs returned", func(done Done) { + It("emits that no logs were found if no logs returned", func() { delegator := newDelegator(&fakes.MockHeaderSyncLogRepository{}) delegator.AddTransformer(&mocks.MockEventTransformer{}) - go delegator.DelegateLogs(errsChan, logsFound) + err, logsFound := delegator.DelegateLogs() - Expect(<-logsFound).To(BeFalse()) - close(done) + Expect(err).NotTo(HaveOccurred()) + Expect(logsFound).To(BeFalse()) }) - It("delegates chunked logs to transformers", func(done Done) { + It("delegates chunked logs to transformers", func() { fakeTransformer := &mocks.MockEventTransformer{} config := mocks.FakeTransformerConfig fakeTransformer.SetTransformerConfig(config) @@ -127,31 +115,27 @@ var _ = Describe("Log delegator", func() { delegator := newDelegator(mockLogRepository) delegator.AddTransformer(fakeTransformer) - go delegator.DelegateLogs(errsChan, logsFound) + err, _ := delegator.DelegateLogs() - Eventually(func() bool { - return fakeTransformer.ExecuteWasCalled - }).Should(BeTrue()) - Eventually(func() []core.HeaderSyncLog { - return fakeTransformer.PassedLogs - }).Should(Equal(fakeHeaderSyncLogs)) - close(done) + Expect(err).NotTo(HaveOccurred()) + Expect(fakeTransformer.ExecuteWasCalled).To(BeTrue()) + Expect(fakeTransformer.PassedLogs).To(Equal(fakeHeaderSyncLogs)) }) - It("emits error if transformer returns an error", func(done Done) { + It("emits error if transformer returns an error", func() { mockLogRepository := &fakes.MockHeaderSyncLogRepository{} mockLogRepository.ReturnLogs = []core.HeaderSyncLog{{}} delegator := newDelegator(mockLogRepository) fakeTransformer := &mocks.MockEventTransformer{ExecuteError: fakes.FakeError} delegator.AddTransformer(fakeTransformer) - go delegator.DelegateLogs(errsChan, logsFound) + err, _ := delegator.DelegateLogs() - Expect(<-errsChan).To(MatchError(fakes.FakeError)) 
- close(done) + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(fakes.FakeError)) }) - It("emits logs found when logs returned and delegated", func(done Done) { + It("emits logs found when logs returned and delegated", func() { fakeTransformer := &mocks.MockEventTransformer{} config := mocks.FakeTransformerConfig fakeTransformer.SetTransformerConfig(config) @@ -165,13 +149,12 @@ var _ = Describe("Log delegator", func() { delegator := newDelegator(mockLogRepository) delegator.AddTransformer(fakeTransformer) - go delegator.DelegateLogs(errsChan, logsFound) + err, logsFound := delegator.DelegateLogs() - Expect(<-logsFound).To(BeTrue()) - close(done) + Expect(err).NotTo(HaveOccurred()) + Expect(logsFound).To(BeTrue()) }) }) - }) func newDelegator(headerSyncLogRepository *fakes.MockHeaderSyncLogRepository) *logs.LogDelegator { diff --git a/libraries/shared/logs/extractor.go b/libraries/shared/logs/extractor.go index 5c42dd8a..f8132352 100644 --- a/libraries/shared/logs/extractor.go +++ b/libraries/shared/logs/extractor.go @@ -30,9 +30,14 @@ import ( var ErrNoWatchedAddresses = errors.New("no watched addresses configured in the log extractor") +const ( + missingHeadersFound = true + noMissingHeadersFound = false +) + type ILogExtractor interface { AddTransformerConfig(config transformer.EventTransformerConfig) - ExtractLogs(recheckHeaders constants.TransformerExecution, errs chan error, missingHeadersFound chan bool) + ExtractLogs(recheckHeaders constants.TransformerExecution) (error, bool) } type LogExtractor struct { @@ -59,56 +64,50 @@ func (extractor *LogExtractor) AddTransformerConfig(config transformer.EventTran } // Fetch and persist watched logs -func (extractor LogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution, errs chan error, missingHeadersFound chan bool) { +func (extractor LogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution) (error, bool) { if len(extractor.Addresses) < 1 { logrus.Errorf("error extracting 
logs: %s", ErrNoWatchedAddresses.Error()) - errs <- ErrNoWatchedAddresses - return + return ErrNoWatchedAddresses, noMissingHeadersFound } missingHeaders, missingHeadersErr := extractor.CheckedHeadersRepository.MissingHeaders(*extractor.StartingBlock, -1, getCheckCount(recheckHeaders)) if missingHeadersErr != nil { logrus.Errorf("error fetching missing headers: %s", missingHeadersErr) - errs <- missingHeadersErr - return + return missingHeadersErr, noMissingHeadersFound } if len(missingHeaders) < 1 { - missingHeadersFound <- false - return + return nil, noMissingHeadersFound } for _, header := range missingHeaders { logs, fetchLogsErr := extractor.Fetcher.FetchLogs(extractor.Addresses, extractor.Topics, header) if fetchLogsErr != nil { logError("error fetching logs for header: %s", fetchLogsErr, header) - errs <- fetchLogsErr - return + return fetchLogsErr, missingHeadersFound } if len(logs) > 0 { transactionsSyncErr := extractor.Syncer.SyncTransactions(header.Id, logs) if transactionsSyncErr != nil { logError("error syncing transactions: %s", transactionsSyncErr, header) - errs <- transactionsSyncErr - return + return transactionsSyncErr, missingHeadersFound } createLogsErr := extractor.LogRepository.CreateHeaderSyncLogs(header.Id, logs) if createLogsErr != nil { logError("error persisting logs: %s", createLogsErr, header) - errs <- createLogsErr - return + return createLogsErr, missingHeadersFound } } markHeaderCheckedErr := extractor.CheckedHeadersRepository.MarkHeaderChecked(header.Id) if markHeaderCheckedErr != nil { logError("error marking header checked: %s", markHeaderCheckedErr, header) - errs <- markHeaderCheckedErr + return markHeaderCheckedErr, missingHeadersFound } } - missingHeadersFound <- true + return nil, missingHeadersFound } func earlierStartingBlockNumber(transformerBlock, watcherBlock int64) bool { diff --git a/libraries/shared/logs/extractor_test.go b/libraries/shared/logs/extractor_test.go index 539c4c29..d21f2f6a 100644 --- 
a/libraries/shared/logs/extractor_test.go +++ b/libraries/shared/logs/extractor_test.go @@ -81,109 +81,84 @@ var _ = Describe("Log extractor", func() { }) Describe("ExtractLogs", func() { - var ( - errsChan chan error - missingHeadersFound chan bool - ) + It("returns error if no watched addresses configured", func() { + err, _ := extractor.ExtractLogs(constants.HeaderMissing) - BeforeEach(func() { - errsChan = make(chan error) - missingHeadersFound = make(chan bool) - }) - - It("returns error if no watched addresses configured", func(done Done) { - go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) - - Expect(<-errsChan).To(MatchError(logs.ErrNoWatchedAddresses)) - close(done) + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(logs.ErrNoWatchedAddresses)) }) Describe("when checking missing headers", func() { - It("gets missing headers since configured starting block with check_count < 1", func(done Done) { + It("gets missing headers since configured starting block with check_count < 1", func() { mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{}} extractor.CheckedHeadersRepository = mockCheckedHeadersRepository startingBlockNumber := rand.Int63() extractor.AddTransformerConfig(getTransformerConfig(startingBlockNumber)) - go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) + err, _ := extractor.ExtractLogs(constants.HeaderMissing) - Eventually(func() int64 { - return mockCheckedHeadersRepository.StartingBlockNumber - }).Should(Equal(startingBlockNumber)) - Eventually(func() int64 { - return mockCheckedHeadersRepository.EndingBlockNumber - }).Should(Equal(int64(-1))) - Eventually(func() int64 { - return mockCheckedHeadersRepository.CheckCount - }).Should(Equal(int64(1))) - close(done) + Expect(err).NotTo(HaveOccurred()) + Expect(mockCheckedHeadersRepository.StartingBlockNumber).To(Equal(startingBlockNumber)) + 
Expect(mockCheckedHeadersRepository.EndingBlockNumber).To(Equal(int64(-1))) + Expect(mockCheckedHeadersRepository.CheckCount).To(Equal(int64(1))) }) }) Describe("when rechecking headers", func() { - It("gets missing headers since configured starting block with check_count < RecheckHeaderCap", func(done Done) { + It("gets missing headers since configured starting block with check_count < RecheckHeaderCap", func() { mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{}} extractor.CheckedHeadersRepository = mockCheckedHeadersRepository startingBlockNumber := rand.Int63() extractor.AddTransformerConfig(getTransformerConfig(startingBlockNumber)) - go extractor.ExtractLogs(constants.HeaderRecheck, errsChan, missingHeadersFound) + err, _ := extractor.ExtractLogs(constants.HeaderRecheck) - Eventually(func() int64 { - return mockCheckedHeadersRepository.StartingBlockNumber - }).Should(Equal(startingBlockNumber)) - Eventually(func() int64 { - return mockCheckedHeadersRepository.EndingBlockNumber - }).Should(Equal(int64(-1))) - Eventually(func() int64 { - return mockCheckedHeadersRepository.CheckCount - }).Should(Equal(constants.RecheckHeaderCap)) - close(done) + Expect(err).NotTo(HaveOccurred()) + Expect(mockCheckedHeadersRepository.StartingBlockNumber).To(Equal(startingBlockNumber)) + Expect(mockCheckedHeadersRepository.EndingBlockNumber).To(Equal(int64(-1))) + Expect(mockCheckedHeadersRepository.CheckCount).To(Equal(constants.RecheckHeaderCap)) }) }) - It("emits error if getting missing headers fails", func(done Done) { + It("emits error if getting missing headers fails", func() { addTransformerConfig(extractor) mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} mockCheckedHeadersRepository.MissingHeadersReturnError = fakes.FakeError extractor.CheckedHeadersRepository = mockCheckedHeadersRepository - go extractor.ExtractLogs(constants.HeaderMissing, errsChan, 
missingHeadersFound) + err, _ := extractor.ExtractLogs(constants.HeaderMissing) - Expect(<-errsChan).To(MatchError(fakes.FakeError)) - close(done) + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(fakes.FakeError)) }) Describe("when no missing headers", func() { - It("does not fetch logs", func(done Done) { + It("does not fetch logs", func() { addTransformerConfig(extractor) mockLogFetcher := &mocks.MockLogFetcher{} extractor.Fetcher = mockLogFetcher - go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) + err, _ := extractor.ExtractLogs(constants.HeaderMissing) - Consistently(func() bool { - return mockLogFetcher.FetchCalled - }).Should(BeFalse()) - close(done) + Expect(err).NotTo(HaveOccurred()) + Expect(mockLogFetcher.FetchCalled).To(BeFalse()) }) - It("emits that no missing headers were found", func(done Done) { + It("emits that no missing headers were found", func() { addTransformerConfig(extractor) mockLogFetcher := &mocks.MockLogFetcher{} extractor.Fetcher = mockLogFetcher - go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) + _, missingHeadersFound := extractor.ExtractLogs(constants.HeaderMissing) - Expect(<-missingHeadersFound).To(BeFalse()) - close(done) + Expect(missingHeadersFound).To(BeFalse()) }) }) Describe("when there are missing headers", func() { - It("fetches logs for missing headers", func(done Done) { + It("fetches logs for missing headers", func() { addMissingHeader(extractor) config := transformer.EventTransformerConfig{ ContractAddresses: []string{fakes.FakeAddress.Hex()}, @@ -194,68 +169,58 @@ var _ = Describe("Log extractor", func() { mockLogFetcher := &mocks.MockLogFetcher{} extractor.Fetcher = mockLogFetcher - go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) + err, _ := extractor.ExtractLogs(constants.HeaderMissing) - Eventually(func() bool { - return mockLogFetcher.FetchCalled - }).Should(BeTrue()) + Expect(err).NotTo(HaveOccurred()) + 
Expect(mockLogFetcher.FetchCalled).To(BeTrue()) expectedTopics := []common.Hash{common.HexToHash(config.Topic)} - Eventually(func() []common.Hash { - return mockLogFetcher.Topics - }).Should(Equal(expectedTopics)) + Expect(mockLogFetcher.Topics).To(Equal(expectedTopics)) expectedAddresses := transformer.HexStringsToAddresses(config.ContractAddresses) - Eventually(func() []common.Address { - return mockLogFetcher.ContractAddresses - }).Should(Equal(expectedAddresses)) - close(done) + Expect(mockLogFetcher.ContractAddresses).To(Equal(expectedAddresses)) }) - It("returns error if fetching logs fails", func(done Done) { + It("returns error if fetching logs fails", func() { addMissingHeader(extractor) addTransformerConfig(extractor) mockLogFetcher := &mocks.MockLogFetcher{} mockLogFetcher.ReturnError = fakes.FakeError extractor.Fetcher = mockLogFetcher - go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) + err, _ := extractor.ExtractLogs(constants.HeaderMissing) - Expect(<-errsChan).To(MatchError(fakes.FakeError)) - close(done) + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(fakes.FakeError)) }) Describe("when no fetched logs", func() { - It("does not sync transactions", func(done Done) { + It("does not sync transactions", func() { addMissingHeader(extractor) addTransformerConfig(extractor) mockTransactionSyncer := &fakes.MockTransactionSyncer{} extractor.Syncer = mockTransactionSyncer - go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) + err, _ := extractor.ExtractLogs(constants.HeaderMissing) - Consistently(func() bool { - return mockTransactionSyncer.SyncTransactionsCalled - }).Should(BeFalse()) - close(done) + Expect(err).NotTo(HaveOccurred()) + Expect(mockTransactionSyncer.SyncTransactionsCalled).To(BeFalse()) }) }) Describe("when there are fetched logs", func() { - It("syncs transactions", func(done Done) { + It("syncs transactions", func() { addMissingHeader(extractor) 
addFetchedLog(extractor) addTransformerConfig(extractor) mockTransactionSyncer := &fakes.MockTransactionSyncer{} extractor.Syncer = mockTransactionSyncer - go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) + err, _ := extractor.ExtractLogs(constants.HeaderMissing) - Eventually(func() bool { - return mockTransactionSyncer.SyncTransactionsCalled - }).Should(BeTrue()) - close(done) + Expect(err).NotTo(HaveOccurred()) + Expect(mockTransactionSyncer.SyncTransactionsCalled).To(BeTrue()) }) - It("returns error if syncing transactions fails", func(done Done) { + It("returns error if syncing transactions fails", func() { addMissingHeader(extractor) addFetchedLog(extractor) addTransformerConfig(extractor) @@ -263,13 +228,13 @@ var _ = Describe("Log extractor", func() { mockTransactionSyncer.SyncTransactionsError = fakes.FakeError extractor.Syncer = mockTransactionSyncer - go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) + err, _ := extractor.ExtractLogs(constants.HeaderMissing) - Expect(<-errsChan).To(MatchError(fakes.FakeError)) - close(done) + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(fakes.FakeError)) }) - It("persists fetched logs", func(done Done) { + It("persists fetched logs", func() { addMissingHeader(extractor) addTransformerConfig(extractor) fakeLogs := []types.Log{{ @@ -283,15 +248,13 @@ var _ = Describe("Log extractor", func() { mockLogRepository := &fakes.MockHeaderSyncLogRepository{} extractor.LogRepository = mockLogRepository - go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) + err, _ := extractor.ExtractLogs(constants.HeaderMissing) - Eventually(func() []types.Log { - return mockLogRepository.PassedLogs - }).Should(Equal(fakeLogs)) - close(done) + Expect(err).NotTo(HaveOccurred()) + Expect(mockLogRepository.PassedLogs).To(Equal(fakeLogs)) }) - It("returns error if persisting logs fails", func(done Done) { + It("returns error if persisting logs 
fails", func() { addMissingHeader(extractor) addFetchedLog(extractor) addTransformerConfig(extractor) @@ -299,14 +262,14 @@ var _ = Describe("Log extractor", func() { mockLogRepository.CreateError = fakes.FakeError extractor.LogRepository = mockLogRepository - go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) + err, _ := extractor.ExtractLogs(constants.HeaderMissing) - Expect(<-errsChan).To(MatchError(fakes.FakeError)) - close(done) + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(fakes.FakeError)) }) }) - It("marks header checked", func(done Done) { + It("marks header checked", func() { addFetchedLog(extractor) addTransformerConfig(extractor) mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} @@ -314,15 +277,13 @@ var _ = Describe("Log extractor", func() { mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{Id: headerID}} extractor.CheckedHeadersRepository = mockCheckedHeadersRepository - go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) + err, _ := extractor.ExtractLogs(constants.HeaderMissing) - Eventually(func() int64 { - return mockCheckedHeadersRepository.HeaderID - }).Should(Equal(headerID)) - close(done) + Expect(err).NotTo(HaveOccurred()) + Expect(mockCheckedHeadersRepository.HeaderID).To(Equal(headerID)) }) - It("returns error if marking header checked fails", func(done Done) { + It("returns error if marking header checked fails", func() { addFetchedLog(extractor) addTransformerConfig(extractor) mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} @@ -330,20 +291,20 @@ var _ = Describe("Log extractor", func() { mockCheckedHeadersRepository.MarkHeaderCheckedReturnError = fakes.FakeError extractor.CheckedHeadersRepository = mockCheckedHeadersRepository - go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) + err, _ := extractor.ExtractLogs(constants.HeaderMissing) - 
Expect(<-errsChan).To(MatchError(fakes.FakeError)) - close(done) + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(fakes.FakeError)) }) - It("emits that missing headers were found", func(done Done) { + It("emits that missing headers were found", func() { addMissingHeader(extractor) addTransformerConfig(extractor) - go extractor.ExtractLogs(constants.HeaderMissing, errsChan, missingHeadersFound) + err, missingHeadersFound := extractor.ExtractLogs(constants.HeaderMissing) - Expect(<-missingHeadersFound).To(BeTrue()) - close(done) + Expect(err).NotTo(HaveOccurred()) + Expect(missingHeadersFound).To(BeTrue()) }) }) }) diff --git a/libraries/shared/mocks/log_delegator.go b/libraries/shared/mocks/log_delegator.go index 8a2338a4..ad6b926e 100644 --- a/libraries/shared/mocks/log_delegator.go +++ b/libraries/shared/mocks/log_delegator.go @@ -31,14 +31,14 @@ func (delegator *MockLogDelegator) AddTransformer(t transformer.EventTransformer delegator.AddedTransformers = append(delegator.AddedTransformers, t) } -func (delegator *MockLogDelegator) DelegateLogs(errs chan error, logsFound chan bool) { +func (delegator *MockLogDelegator) DelegateLogs() (error, bool) { delegator.DelegateCallCount++ var delegateErrorThisRun error delegateErrorThisRun, delegator.DelegateErrors = delegator.DelegateErrors[0], delegator.DelegateErrors[1:] if delegateErrorThisRun != nil { - errs <- delegateErrorThisRun + return delegateErrorThisRun, false } var logsFoundThisRun bool logsFoundThisRun, delegator.LogsFound = delegator.LogsFound[0], delegator.LogsFound[1:] - logsFound <- logsFoundThisRun + return nil, logsFoundThisRun } diff --git a/libraries/shared/mocks/log_extractor.go b/libraries/shared/mocks/log_extractor.go index 0fba9d27..1cb26132 100644 --- a/libraries/shared/mocks/log_extractor.go +++ b/libraries/shared/mocks/log_extractor.go @@ -32,14 +32,14 @@ func (extractor *MockLogExtractor) AddTransformerConfig(config transformer.Event extractor.AddedConfigs = 
append(extractor.AddedConfigs, config) } -func (extractor *MockLogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution, errs chan error, missingHeadersFound chan bool) { +func (extractor *MockLogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution) (error, bool) { extractor.ExtractLogsCount++ var errorThisRun error errorThisRun, extractor.ExtractLogsErrors = extractor.ExtractLogsErrors[0], extractor.ExtractLogsErrors[1:] if errorThisRun != nil { - errs <- errorThisRun + return errorThisRun, false } var missingHeadersExist bool missingHeadersExist, extractor.MissingHeadersExist = extractor.MissingHeadersExist[0], extractor.MissingHeadersExist[1:] - missingHeadersFound <- missingHeadersExist + return nil, missingHeadersExist } diff --git a/libraries/shared/watcher/event_watcher.go b/libraries/shared/watcher/event_watcher.go index ff40e302..e10da6c6 100644 --- a/libraries/shared/watcher/event_watcher.go +++ b/libraries/shared/watcher/event_watcher.go @@ -89,42 +89,29 @@ func (watcher *EventWatcher) Execute(recheckHeaders constants.TransformerExecuti } func (watcher *EventWatcher) extractLogs(recheckHeaders constants.TransformerExecution, errs chan error) { - extractLogsErr := make(chan error) - missingHeadersFound := make(chan bool) - go watcher.LogExtractor.ExtractLogs(recheckHeaders, extractLogsErr, missingHeadersFound) + err, missingHeadersFound := watcher.LogExtractor.ExtractLogs(recheckHeaders) + if err != nil { + errs <- err + } - for { - select { - case err := <-extractLogsErr: - errs <- err - case missingHeaders := <-missingHeadersFound: - if missingHeaders { - go watcher.extractLogs(recheckHeaders, errs) - } else { - time.Sleep(NoNewDataPause) - go watcher.extractLogs(recheckHeaders, errs) - } - } + if missingHeadersFound { + watcher.extractLogs(recheckHeaders, errs) + } else { + time.Sleep(NoNewDataPause) + watcher.extractLogs(recheckHeaders, errs) } } func (watcher *EventWatcher) delegateLogs(errs chan error) { - 
delegateLogsErr := make(chan error) - logsFound := make(chan bool) - go watcher.LogDelegator.DelegateLogs(delegateLogsErr, logsFound) - - for { - select { - case err := <-delegateLogsErr: - errs <- err - case logs := <-logsFound: - if logs { - go watcher.delegateLogs(errs) - } else { - time.Sleep(NoNewDataPause) - go watcher.delegateLogs(errs) - } - } + err, logsFound := watcher.LogDelegator.DelegateLogs() + if err != nil { + errs <- err } + if logsFound { + watcher.delegateLogs(errs) + } else { + time.Sleep(NoNewDataPause) + watcher.delegateLogs(errs) + } } From 666ea1c325c0f176140443fdc0b4597f5885d483 Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Mon, 26 Aug 2019 16:43:14 -0500 Subject: [PATCH 08/21] Update checked headers for new transformers - If a header was marked as checked before a transformer was added to the watcher, mark all headers since the new transformer's starting block number as unchecked. --- cmd/composeAndExecute.go | 5 +- cmd/execute.go | 5 +- .../00029_create_checked_logs_table.sql | 12 ++ db/schema.sql | 46 +++++++ go.mod | 16 +++ go.sum | 9 ++ libraries/shared/logs/extractor.go | 29 ++++- libraries/shared/logs/extractor_test.go | 117 ++++++++++++++---- libraries/shared/mocks/log_extractor.go | 12 +- libraries/shared/watcher/event_watcher.go | 11 +- .../checked_headers_repository.go | 6 + .../checked_headers_repository_test.go | 41 ++++++ .../repositories/checked_logs_repository.go | 69 +++++++++++ .../checked_logs_repository_test.go | 110 ++++++++++++++++ pkg/datastore/repository.go | 6 + pkg/fakes/checked_logs_repository.go | 39 ++++++ pkg/fakes/data.go | 20 ++- pkg/fakes/mock_checked_headers_repository.go | 33 +++-- test_config/test_config.go | 1 + 19 files changed, 538 insertions(+), 49 deletions(-) create mode 100644 db/migrations/00029_create_checked_logs_table.sql create mode 100644 pkg/datastore/postgres/repositories/checked_logs_repository.go create mode 100644 pkg/datastore/postgres/repositories/checked_logs_repository_test.go 
create mode 100644 pkg/fakes/checked_logs_repository.go diff --git a/cmd/composeAndExecute.go b/cmd/composeAndExecute.go index d3e0e0b9..633993fa 100644 --- a/cmd/composeAndExecute.go +++ b/cmd/composeAndExecute.go @@ -170,7 +170,10 @@ func composeAndExecute() { var wg syn.WaitGroup if len(ethEventInitializers) > 0 { ew := watcher.NewEventWatcher(&db, blockChain) - ew.AddTransformers(ethEventInitializers) + err := ew.AddTransformers(ethEventInitializers) + if err != nil { + LogWithCommand.Fatalf("failed to add event transformer initializers to watcher: %s", err.Error()) + } wg.Add(1) go watchEthEvents(&ew, &wg) } diff --git a/cmd/execute.go b/cmd/execute.go index 108eba82..1a71b297 100644 --- a/cmd/execute.go +++ b/cmd/execute.go @@ -114,7 +114,10 @@ func execute() { var wg syn.WaitGroup if len(ethEventInitializers) > 0 { ew := watcher.NewEventWatcher(&db, blockChain) - ew.AddTransformers(ethEventInitializers) + err = ew.AddTransformers(ethEventInitializers) + if err != nil { + LogWithCommand.Fatalf("failed to add event transformer initializers to watcher: %s", err.Error()) + } wg.Add(1) go watchEthEvents(&ew, &wg) } diff --git a/db/migrations/00029_create_checked_logs_table.sql b/db/migrations/00029_create_checked_logs_table.sql new file mode 100644 index 00000000..91445cd9 --- /dev/null +++ b/db/migrations/00029_create_checked_logs_table.sql @@ -0,0 +1,12 @@ +-- +goose Up +-- SQL in this section is executed when the migration is applied. +CREATE TABLE public.checked_logs +( + id SERIAL PRIMARY KEY, + contract_address VARCHAR(42), + topic_zero VARCHAR(66) +); + +-- +goose Down +-- SQL in this section is executed when the migration is rolled back. 
+DROP TABLE public.checked_logs; \ No newline at end of file diff --git a/db/schema.sql b/db/schema.sql index 6942d6a9..16791a83 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -156,6 +156,37 @@ CREATE SEQUENCE public.checked_headers_id_seq ALTER SEQUENCE public.checked_headers_id_seq OWNED BY public.checked_headers.id; +-- +-- Name: checked_logs; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.checked_logs ( + id integer NOT NULL, + contract_address character varying(42), + topic_zero character varying(66) +); + + +-- +-- Name: checked_logs_id_seq; Type: SEQUENCE; Schema: public; Owner: - +-- + +CREATE SEQUENCE public.checked_logs_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +-- +-- Name: checked_logs_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - +-- + +ALTER SEQUENCE public.checked_logs_id_seq OWNED BY public.checked_logs.id; + + -- -- Name: eth_nodes; Type: TABLE; Schema: public; Owner: - -- @@ -656,6 +687,13 @@ ALTER TABLE ONLY public.blocks ALTER COLUMN id SET DEFAULT nextval('public.block ALTER TABLE ONLY public.checked_headers ALTER COLUMN id SET DEFAULT nextval('public.checked_headers_id_seq'::regclass); +-- +-- Name: checked_logs id; Type: DEFAULT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.checked_logs ALTER COLUMN id SET DEFAULT nextval('public.checked_logs_id_seq'::regclass); + + -- -- Name: eth_nodes id; Type: DEFAULT; Schema: public; Owner: - -- @@ -787,6 +825,14 @@ ALTER TABLE ONLY public.checked_headers ADD CONSTRAINT checked_headers_pkey PRIMARY KEY (id); +-- +-- Name: checked_logs checked_logs_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.checked_logs + ADD CONSTRAINT checked_logs_pkey PRIMARY KEY (id); + + -- -- Name: blocks eth_node_id_block_number_uc; Type: CONSTRAINT; Schema: public; Owner: - -- diff --git a/go.mod b/go.mod index 7dc9f4c0..3f13c0a6 100644 --- a/go.mod +++ b/go.mod @@ -6,28 +6,44 @@ require ( 
github.com/allegro/bigcache v1.2.1 // indirect github.com/aristanetworks/goarista v0.0.0-20190712234253-ed1100a1c015 // indirect github.com/dave/jennifer v1.3.0 + github.com/deckarep/golang-set v1.7.1 // indirect + github.com/edsrzf/mmap-go v1.0.0 // indirect + github.com/elastic/gosigar v0.10.4 // indirect github.com/ethereum/go-ethereum v1.9.1 + github.com/fjl/memsize v0.0.0-20190710130421-bcb5799ab5e5 // indirect + github.com/gballet/go-libpcsclite v0.0.0-20190607065134-2772fd86a8ff // indirect + github.com/gorilla/websocket v1.4.1 // indirect github.com/graph-gophers/graphql-go v0.0.0-20190724201507-010347b5f9e6 // indirect github.com/hashicorp/golang-lru v0.5.1 github.com/hpcloud/tail v1.0.0 + github.com/huin/goupnp v1.0.0 // indirect + github.com/jackpal/go-nat-pmp v1.0.1 // indirect github.com/jmoiron/sqlx v0.0.0-20181024163419-82935fac6c1a github.com/karalabe/usb v0.0.0-20190819132248-550797b1cad8 // indirect github.com/lib/pq v1.0.0 + github.com/mattn/go-colorable v0.1.2 // indirect github.com/mattn/go-isatty v0.0.9 // indirect + github.com/mattn/go-runewidth v0.0.4 // indirect github.com/mitchellh/go-homedir v1.1.0 + github.com/olekukonko/tablewriter v0.0.1 // indirect github.com/onsi/ginkgo v1.7.0 github.com/onsi/gomega v1.4.3 github.com/pborman/uuid v1.2.0 // indirect github.com/pressly/goose v2.6.0+incompatible github.com/prometheus/tsdb v0.10.0 // indirect + github.com/rjeczalik/notify v0.9.2 // indirect github.com/rs/cors v1.7.0 // indirect github.com/sirupsen/logrus v1.2.0 github.com/spf13/cobra v0.0.3 github.com/spf13/viper v1.3.2 + github.com/status-im/keycard-go v0.0.0-20190424133014-d95853db0f48 // indirect github.com/steakknife/bloomfilter v0.0.0-20180922174646-6819c0d2a570 // indirect github.com/steakknife/hamming v0.0.0-20180906055917-c99c65617cd3 // indirect + github.com/syndtr/goleveldb v1.0.0 // indirect github.com/tyler-smith/go-bip39 v1.0.2 // indirect + github.com/wsddn/go-ecdh v0.0.0-20161211032359-48726bab9208 // indirect 
golang.org/x/net v0.0.0-20190603091049-60506f45cf65 golang.org/x/sync v0.0.0-20190423024810-112230192c58 + gopkg.in/olebedev/go-duktape.v3 v3.0.0-20190709231704-1e4459ed25ff // indirect gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 ) diff --git a/go.sum b/go.sum index 976da9ae..fa407d6f 100644 --- a/go.sum +++ b/go.sum @@ -60,6 +60,7 @@ github.com/go-kit/kit v0.8.0 h1:Wz+5lgoB0kkuqLEc6NVmwRknTKP6dTGbSqvhZtBI/j0= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-logfmt/logfmt v0.3.0 h1:8HUsc87TaSWLKwrnumgC8/YconD2fJQsRJAsWaPg2ic= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-sql-driver/mysql v1.4.0 h1:7LxgVwFb2hIQtMm87NdgAVfXjnt4OePseqT1tKx+opk= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-sql-driver/mysql v1.4.1 h1:g24URVg0OFbNUTx9qqY1IRZ9D9z3iPyi5zKhQZpNwpA= github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= @@ -67,9 +68,11 @@ github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4= github.com/golang/snappy v0.0.1/go.mod 
h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/uuid v1.0.0 h1:b4Gk+7WdP/d3HZH8EJsZpvV7EtDOgaZLtnaNGIu1adA= @@ -78,6 +81,8 @@ github.com/google/uuid v1.1.0 h1:Jf4mxPC/ziBnoPIdpQdPJ9OeiomAUHLvxmPRSPH9m4s= github.com/google/uuid v1.1.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q= github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.1 h1:q7AeDBpnBk8AogcD4DSag/Ukw/KV+YhzLj2bP5HvKCM= +github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/graph-gophers/graphql-go v0.0.0-20190610161739-8f92f34fc598/go.mod h1:Au3iQ8DvDis8hZ4q2OzRcaKYlAsPt+fYvib5q4nIqu4= github.com/graph-gophers/graphql-go v0.0.0-20190724201507-010347b5f9e6 h1:9WiNlI9Cds5S5YITwRpRs8edNaq0nxTEymhDW20A1QE= github.com/graph-gophers/graphql-go v0.0.0-20190724201507-010347b5f9e6/go.mod h1:Au3iQ8DvDis8hZ4q2OzRcaKYlAsPt+fYvib5q4nIqu4= @@ -132,6 +137,7 @@ github.com/mattn/go-isatty v0.0.9 h1:d5US/mDsogSGW37IV293h//ZFaeajb69h+EHFsv2xGg github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= github.com/mattn/go-runewidth v0.0.4 h1:2BvfKmzob6Bmd4YsL0zygOqfdFnK7GR4QL06Do4/p7Y= github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/mattn/go-sqlite3 v1.9.0 h1:pDRiWfl+++eC2FEFRy6jXmQlvp4Yh3z1MJKg4UeYM/4= github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/go-sqlite3 v1.11.0 h1:LDdKkqtYlom37fkvqs8rMPFKAMe8+SgjbwZ6ex1/A/Q= github.com/mattn/go-sqlite3 v1.11.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= @@ -166,6 +172,7 @@ github.com/pborman/uuid v1.2.0 h1:J7Q5mO4ysT1dv8hyrUGHb9+ooztCXu1D8MY8DZYsu3g= github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= github.com/pelletier/go-toml v1.2.0 
h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= +github.com/pkg/errors v0.8.0 h1:WdK/asTD0HN+q6hsWO3/vpuAkAr+tw6aNJNDFFf0+qw= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -226,6 +233,8 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV github.com/syndtr/goleveldb v0.0.0-20180815032940-ae2bd5eed72d/go.mod h1:Z4AUp2Km+PwemOoO/VB5AOx9XSsIItzFjoJlOSiYmn0= github.com/syndtr/goleveldb v0.0.0-20181128100959-b001fa50d6b2 h1:GnOzE5fEFN3b2zDhJJABEofdb51uMRNb8eqIVtdducs= github.com/syndtr/goleveldb v0.0.0-20181128100959-b001fa50d6b2/go.mod h1:Z4AUp2Km+PwemOoO/VB5AOx9XSsIItzFjoJlOSiYmn0= +github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE= +github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ= github.com/tyler-smith/go-bip39 v1.0.0/go.mod h1:sJ5fKU0s6JVwZjjcUEX2zFOnvq0ASQ2K9Zr6cf67kNs= github.com/tyler-smith/go-bip39 v1.0.2 h1:+t3w+KwLXO6154GNJY+qUtIxLTmFjfUmpguQT1OlOT8= github.com/tyler-smith/go-bip39 v1.0.2/go.mod h1:sJ5fKU0s6JVwZjjcUEX2zFOnvq0ASQ2K9Zr6cf67kNs= diff --git a/libraries/shared/logs/extractor.go b/libraries/shared/logs/extractor.go index f8132352..aed39d02 100644 --- a/libraries/shared/logs/extractor.go +++ b/libraries/shared/logs/extractor.go @@ -36,13 +36,14 @@ const ( ) type ILogExtractor interface { - AddTransformerConfig(config transformer.EventTransformerConfig) + AddTransformerConfig(config transformer.EventTransformerConfig) error ExtractLogs(recheckHeaders constants.TransformerExecution) (error, bool) } type LogExtractor struct { Addresses []common.Address CheckedHeadersRepository datastore.CheckedHeadersRepository + CheckedLogsRepository 
datastore.CheckedLogsRepository Fetcher fetcher.ILogFetcher LogRepository datastore.HeaderSyncLogRepository StartingBlock *int64 @@ -51,7 +52,12 @@ type LogExtractor struct { } // Add additional logs to extract -func (extractor *LogExtractor) AddTransformerConfig(config transformer.EventTransformerConfig) { +func (extractor *LogExtractor) AddTransformerConfig(config transformer.EventTransformerConfig) error { + checkedHeadersErr := extractor.updateCheckedHeaders(config) + if checkedHeadersErr != nil { + return checkedHeadersErr + } + if extractor.StartingBlock == nil { extractor.StartingBlock = &config.StartingBlockNumber } else if earlierStartingBlockNumber(config.StartingBlockNumber, *extractor.StartingBlock) { @@ -61,6 +67,7 @@ func (extractor *LogExtractor) AddTransformerConfig(config transformer.EventTran addresses := transformer.HexStringsToAddresses(config.ContractAddresses) extractor.Addresses = append(extractor.Addresses, addresses...) extractor.Topics = append(extractor.Topics, common.HexToHash(config.Topic)) + return nil } // Fetch and persist watched logs @@ -129,3 +136,21 @@ func getCheckCount(recheckHeaders constants.TransformerExecution) int64 { return constants.RecheckHeaderCap } } + +func (extractor *LogExtractor) updateCheckedHeaders(config transformer.EventTransformerConfig) error { + hasBeenChecked, hasBeenCheckedErr := extractor.CheckedLogsRepository.HaveLogsBeenChecked(config.ContractAddresses, config.Topic) + if hasBeenCheckedErr != nil { + return hasBeenCheckedErr + } + if !hasBeenChecked { + err := extractor.CheckedHeadersRepository.MarkHeadersUnchecked(config.StartingBlockNumber) + if err != nil { + return err + } + nextErr := extractor.CheckedLogsRepository.MarkLogsChecked(config.ContractAddresses, config.Topic) + if nextErr != nil { + return nextErr + } + } + return nil +} diff --git a/libraries/shared/logs/extractor_test.go b/libraries/shared/logs/extractor_test.go index d21f2f6a..1ce47cc7 100644 --- 
a/libraries/shared/logs/extractor_test.go +++ b/libraries/shared/logs/extractor_test.go @@ -31,42 +31,52 @@ import ( ) var _ = Describe("Log extractor", func() { - var extractor *logs.LogExtractor + var ( + checkedHeadersRepository *fakes.MockCheckedHeadersRepository + checkedLogsRepository *fakes.MockCheckedLogsRepository + extractor *logs.LogExtractor + ) BeforeEach(func() { + checkedHeadersRepository = &fakes.MockCheckedHeadersRepository{} + checkedLogsRepository = &fakes.MockCheckedLogsRepository{} extractor = &logs.LogExtractor{ + CheckedHeadersRepository: checkedHeadersRepository, + CheckedLogsRepository: checkedLogsRepository, Fetcher: &mocks.MockLogFetcher{}, - CheckedHeadersRepository: &fakes.MockCheckedHeadersRepository{}, LogRepository: &fakes.MockHeaderSyncLogRepository{}, Syncer: &fakes.MockTransactionSyncer{}, } }) Describe("AddTransformerConfig", func() { - It("it includes earliest starting block number in fetch logs query", func() { + It("updates extractor's starting block number to earliest available", func() { earlierStartingBlockNumber := rand.Int63() laterStartingBlockNumber := earlierStartingBlockNumber + 1 - extractor.AddTransformerConfig(getTransformerConfig(laterStartingBlockNumber)) - extractor.AddTransformerConfig(getTransformerConfig(earlierStartingBlockNumber)) + errOne := extractor.AddTransformerConfig(getTransformerConfig(laterStartingBlockNumber)) + Expect(errOne).NotTo(HaveOccurred()) + errTwo := extractor.AddTransformerConfig(getTransformerConfig(earlierStartingBlockNumber)) + Expect(errTwo).NotTo(HaveOccurred()) Expect(*extractor.StartingBlock).To(Equal(earlierStartingBlockNumber)) }) - It("includes added addresses in fetch logs query", func() { + It("adds transformer's addresses to extractor's watched addresses", func() { addresses := []string{"0xA", "0xB"} configWithAddresses := transformer.EventTransformerConfig{ ContractAddresses: addresses, StartingBlockNumber: rand.Int63(), } - 
extractor.AddTransformerConfig(configWithAddresses) + err := extractor.AddTransformerConfig(configWithAddresses) + Expect(err).NotTo(HaveOccurred()) expectedAddresses := transformer.HexStringsToAddresses(addresses) Expect(extractor.Addresses).To(Equal(expectedAddresses)) }) - It("includes added topics in fetch logs query", func() { + It("adds transformer's topic to extractor's watched topics", func() { topic := "0x1" configWithTopic := transformer.EventTransformerConfig{ ContractAddresses: []string{fakes.FakeAddress.Hex()}, @@ -74,10 +84,75 @@ var _ = Describe("Log extractor", func() { StartingBlockNumber: rand.Int63(), } - extractor.AddTransformerConfig(configWithTopic) + err := extractor.AddTransformerConfig(configWithTopic) + Expect(err).NotTo(HaveOccurred()) Expect(extractor.Topics).To(Equal([]common.Hash{common.HexToHash(topic)})) }) + + It("returns error if checking whether log has been checked returns error", func() { + checkedLogsRepository.HasLogBeenCheckedError = fakes.FakeError + + err := extractor.AddTransformerConfig(getTransformerConfig(rand.Int63())) + + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(fakes.FakeError)) + }) + + Describe("when log has previously been checked", func() { + It("does not mark any headers unchecked", func() { + checkedLogsRepository.HasLogBeenCheckedReturn = true + + err := extractor.AddTransformerConfig(getTransformerConfig(rand.Int63())) + + Expect(err).NotTo(HaveOccurred()) + Expect(checkedHeadersRepository.MarkHeadersUncheckedCalled).To(BeFalse()) + }) + }) + + Describe("when log has not previously been checked", func() { + BeforeEach(func() { + checkedLogsRepository.HasLogBeenCheckedReturn = false + }) + + It("marks headers since transformer's starting block number as unchecked", func() { + blockNumber := rand.Int63() + + err := extractor.AddTransformerConfig(getTransformerConfig(blockNumber)) + + Expect(err).NotTo(HaveOccurred()) + Expect(checkedHeadersRepository.MarkHeadersUncheckedCalled).To(BeTrue()) + 
Expect(checkedHeadersRepository.MarkHeadersUncheckedStartingBlockNumber).To(Equal(blockNumber)) + }) + + It("returns error if marking headers unchecked returns error", func() { + checkedHeadersRepository.MarkHeadersUncheckedReturnError = fakes.FakeError + + err := extractor.AddTransformerConfig(getTransformerConfig(rand.Int63())) + + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(fakes.FakeError)) + }) + + It("persists that transformer's log has been checked", func() { + config := getTransformerConfig(rand.Int63()) + + err := extractor.AddTransformerConfig(config) + + Expect(err).NotTo(HaveOccurred()) + Expect(checkedLogsRepository.MarkLogCheckedAddresses).To(Equal(config.ContractAddresses)) + Expect(checkedLogsRepository.MarkLogCheckedTopicZero).To(Equal(config.Topic)) + }) + + It("returns error if marking logs checked returns error", func() { + checkedLogsRepository.MarkLogCheckedError = fakes.FakeError + + err := extractor.AddTransformerConfig(getTransformerConfig(rand.Int63())) + + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(fakes.FakeError)) + }) + }) }) Describe("ExtractLogs", func() { @@ -91,7 +166,7 @@ var _ = Describe("Log extractor", func() { Describe("when checking missing headers", func() { It("gets missing headers since configured starting block with check_count < 1", func() { mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} - mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{}} + mockCheckedHeadersRepository.MissingHeadersReturnHeaders = []core.Header{{}} extractor.CheckedHeadersRepository = mockCheckedHeadersRepository startingBlockNumber := rand.Int63() extractor.AddTransformerConfig(getTransformerConfig(startingBlockNumber)) @@ -99,16 +174,16 @@ var _ = Describe("Log extractor", func() { err, _ := extractor.ExtractLogs(constants.HeaderMissing) Expect(err).NotTo(HaveOccurred()) - Expect(mockCheckedHeadersRepository.StartingBlockNumber).To(Equal(startingBlockNumber)) -
Expect(mockCheckedHeadersRepository.EndingBlockNumber).To(Equal(int64(-1))) - Expect(mockCheckedHeadersRepository.CheckCount).To(Equal(int64(1))) + Expect(mockCheckedHeadersRepository.MissingHeadersStartingBlockNumber).To(Equal(startingBlockNumber)) + Expect(mockCheckedHeadersRepository.MissingHeadersEndingBlockNumber).To(Equal(int64(-1))) + Expect(mockCheckedHeadersRepository.MissingHeadersCheckCount).To(Equal(int64(1))) }) }) Describe("when rechecking headers", func() { It("gets missing headers since configured starting block with check_count < RecheckHeaderCap", func() { mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} - mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{}} + mockCheckedHeadersRepository.MissingHeadersReturnHeaders = []core.Header{{}} extractor.CheckedHeadersRepository = mockCheckedHeadersRepository startingBlockNumber := rand.Int63() extractor.AddTransformerConfig(getTransformerConfig(startingBlockNumber)) @@ -116,9 +191,9 @@ var _ = Describe("Log extractor", func() { err, _ := extractor.ExtractLogs(constants.HeaderRecheck) Expect(err).NotTo(HaveOccurred()) - Expect(mockCheckedHeadersRepository.StartingBlockNumber).To(Equal(startingBlockNumber)) - Expect(mockCheckedHeadersRepository.EndingBlockNumber).To(Equal(int64(-1))) - Expect(mockCheckedHeadersRepository.CheckCount).To(Equal(constants.RecheckHeaderCap)) + Expect(mockCheckedHeadersRepository.MissingHeadersStartingBlockNumber).To(Equal(startingBlockNumber)) + Expect(mockCheckedHeadersRepository.MissingHeadersEndingBlockNumber).To(Equal(int64(-1))) + Expect(mockCheckedHeadersRepository.MissingHeadersCheckCount).To(Equal(constants.RecheckHeaderCap)) }) }) @@ -274,20 +349,20 @@ var _ = Describe("Log extractor", func() { addTransformerConfig(extractor) mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} headerID := rand.Int63() - mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{Id: headerID}} + 
mockCheckedHeadersRepository.MissingHeadersReturnHeaders = []core.Header{{Id: headerID}} extractor.CheckedHeadersRepository = mockCheckedHeadersRepository err, _ := extractor.ExtractLogs(constants.HeaderMissing) Expect(err).NotTo(HaveOccurred()) - Expect(mockCheckedHeadersRepository.HeaderID).To(Equal(headerID)) + Expect(mockCheckedHeadersRepository.MarkHeaderCheckedHeaderID).To(Equal(headerID)) }) It("returns error if marking header checked fails", func() { addFetchedLog(extractor) addTransformerConfig(extractor) mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} - mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{Id: rand.Int63()}} + mockCheckedHeadersRepository.MissingHeadersReturnHeaders = []core.Header{{Id: rand.Int63()}} mockCheckedHeadersRepository.MarkHeaderCheckedReturnError = fakes.FakeError extractor.CheckedHeadersRepository = mockCheckedHeadersRepository @@ -321,7 +396,7 @@ func addTransformerConfig(extractor *logs.LogExtractor) { func addMissingHeader(extractor *logs.LogExtractor) { mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} - mockCheckedHeadersRepository.ReturnHeaders = []core.Header{{}} + mockCheckedHeadersRepository.MissingHeadersReturnHeaders = []core.Header{{}} extractor.CheckedHeadersRepository = mockCheckedHeadersRepository } diff --git a/libraries/shared/mocks/log_extractor.go b/libraries/shared/mocks/log_extractor.go index 1cb26132..54511f98 100644 --- a/libraries/shared/mocks/log_extractor.go +++ b/libraries/shared/mocks/log_extractor.go @@ -22,14 +22,16 @@ import ( ) type MockLogExtractor struct { - AddedConfigs []transformer.EventTransformerConfig - ExtractLogsCount int - ExtractLogsErrors []error - MissingHeadersExist []bool + AddedConfigs []transformer.EventTransformerConfig + AddTransformerConfigError error + ExtractLogsCount int + ExtractLogsErrors []error + MissingHeadersExist []bool } -func (extractor *MockLogExtractor) AddTransformerConfig(config 
transformer.EventTransformerConfig) { +func (extractor *MockLogExtractor) AddTransformerConfig(config transformer.EventTransformerConfig) error { extractor.AddedConfigs = append(extractor.AddedConfigs, config) + return extractor.AddTransformerConfigError } func (extractor *MockLogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution) (error, bool) { diff --git a/libraries/shared/watcher/event_watcher.go b/libraries/shared/watcher/event_watcher.go index e10da6c6..04386c63 100644 --- a/libraries/shared/watcher/event_watcher.go +++ b/libraries/shared/watcher/event_watcher.go @@ -41,8 +41,9 @@ type EventWatcher struct { func NewEventWatcher(db *postgres.DB, bc core.BlockChain) EventWatcher { extractor := &logs.LogExtractor{ - Fetcher: fetcher.NewLogFetcher(bc), CheckedHeadersRepository: repositories.NewCheckedHeadersRepository(db), + CheckedLogsRepository: repositories.NewCheckedLogsRepository(db), + Fetcher: fetcher.NewLogFetcher(bc), LogRepository: repositories.NewHeaderSyncLogRepository(db), Syncer: transactions.NewTransactionsSyncer(db, bc), } @@ -59,13 +60,17 @@ func NewEventWatcher(db *postgres.DB, bc core.BlockChain) EventWatcher { } // Adds transformers to the watcher so that their logs will be extracted and delegated. -func (watcher *EventWatcher) AddTransformers(initializers []transformer.EventTransformerInitializer) { +func (watcher *EventWatcher) AddTransformers(initializers []transformer.EventTransformerInitializer) error { for _, initializer := range initializers { t := initializer(watcher.db) watcher.LogDelegator.AddTransformer(t) - watcher.LogExtractor.AddTransformerConfig(t.GetConfig()) + err := watcher.LogExtractor.AddTransformerConfig(t.GetConfig()) + if err != nil { + return err + } } + return nil } // Extracts and delegates watched log events. 
diff --git a/pkg/datastore/postgres/repositories/checked_headers_repository.go b/pkg/datastore/postgres/repositories/checked_headers_repository.go index 341cb272..1e33e425 100644 --- a/pkg/datastore/postgres/repositories/checked_headers_repository.go +++ b/pkg/datastore/postgres/repositories/checked_headers_repository.go @@ -41,6 +41,12 @@ func (repo CheckedHeadersRepository) MarkHeaderChecked(headerID int64) error { return err } +// Remove checked_headers rows with block number >= starting block number +func (repo CheckedHeadersRepository) MarkHeadersUnchecked(startingBlockNumber int64) error { + _, err := repo.db.Exec(`DELETE FROM public.checked_headers WHERE header_id IN (SELECT id FROM public.headers WHERE block_number >= $1)`, startingBlockNumber) + return err +} + // Return header_id if not present in checked_headers or its check_count is < passed checkCount func (repo CheckedHeadersRepository) MissingHeaders(startingBlockNumber, endingBlockNumber, checkCount int64) ([]core.Header, error) { var result []core.Header diff --git a/pkg/datastore/postgres/repositories/checked_headers_repository_test.go b/pkg/datastore/postgres/repositories/checked_headers_repository_test.go index c9a9284a..5ee5780b 100644 --- a/pkg/datastore/postgres/repositories/checked_headers_repository_test.go +++ b/pkg/datastore/postgres/repositories/checked_headers_repository_test.go @@ -73,6 +73,47 @@ var _ = Describe("Checked Headers repository", func() { }) }) + Describe("MarkHeadersUnchecked", func() { + It("removes rows for headers >= starting block number", func() { + blockNumberOne := rand.Int63() + blockNumberTwo := blockNumberOne + 1 + blockNumberThree := blockNumberOne + 2 + fakeHeaderOne := fakes.GetFakeHeader(blockNumberOne) + fakeHeaderTwo := fakes.GetFakeHeader(blockNumberTwo) + fakeHeaderThree := fakes.GetFakeHeader(blockNumberThree) + headerRepository := repositories.NewHeaderRepository(db) + // insert three headers with incrementing block number + headerIdOne,
insertHeaderOneErr := headerRepository.CreateOrUpdateHeader(fakeHeaderOne) + Expect(insertHeaderOneErr).NotTo(HaveOccurred()) + headerIdTwo, insertHeaderTwoErr := headerRepository.CreateOrUpdateHeader(fakeHeaderTwo) + Expect(insertHeaderTwoErr).NotTo(HaveOccurred()) + headerIdThree, insertHeaderThreeErr := headerRepository.CreateOrUpdateHeader(fakeHeaderThree) + Expect(insertHeaderThreeErr).NotTo(HaveOccurred()) + // mark all headers checked + markHeaderOneCheckedErr := repo.MarkHeaderChecked(headerIdOne) + Expect(markHeaderOneCheckedErr).NotTo(HaveOccurred()) + markHeaderTwoCheckedErr := repo.MarkHeaderChecked(headerIdTwo) + Expect(markHeaderTwoCheckedErr).NotTo(HaveOccurred()) + markHeaderThreeCheckedErr := repo.MarkHeaderChecked(headerIdThree) + Expect(markHeaderThreeCheckedErr).NotTo(HaveOccurred()) + + // mark headers unchecked since blockNumberTwo + err := repo.MarkHeadersUnchecked(blockNumberTwo) + + Expect(err).NotTo(HaveOccurred()) + var headerOneChecked, headerTwoChecked, headerThreeChecked bool + getHeaderOneErr := db.Get(&headerOneChecked, `SELECT EXISTS(SELECT 1 FROM public.checked_headers WHERE header_id = $1)`, headerIdOne) + Expect(getHeaderOneErr).NotTo(HaveOccurred()) + Expect(headerOneChecked).To(BeTrue()) + getHeaderTwoErr := db.Get(&headerTwoChecked, `SELECT EXISTS(SELECT 1 FROM public.checked_headers WHERE header_id = $1)`, headerIdTwo) + Expect(getHeaderTwoErr).NotTo(HaveOccurred()) + Expect(headerTwoChecked).To(BeFalse()) + getHeaderThreeErr := db.Get(&headerThreeChecked, `SELECT EXISTS(SELECT 1 FROM public.checked_headers WHERE header_id = $1)`, headerIdThree) + Expect(getHeaderThreeErr).NotTo(HaveOccurred()) + Expect(headerThreeChecked).To(BeFalse()) + }) + }) + Describe("MissingHeaders", func() { var ( headerRepository datastore.HeaderRepository diff --git a/pkg/datastore/postgres/repositories/checked_logs_repository.go b/pkg/datastore/postgres/repositories/checked_logs_repository.go new file mode 100644 index 00000000..113be3ed --- 
/dev/null +++ b/pkg/datastore/postgres/repositories/checked_logs_repository.go @@ -0,0 +1,69 @@ +// VulcanizeDB +// Copyright © 2019 Vulcanize + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. + +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . + +package repositories + +import ( + "github.com/sirupsen/logrus" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" +) + +type CheckedLogsRepository struct { + db *postgres.DB +} + +func NewCheckedLogsRepository(db *postgres.DB) CheckedLogsRepository { + return CheckedLogsRepository{db: db} +} + +// Return whether a given address + topic0 has been fetched on a previous run of vDB +func (repository CheckedLogsRepository) HaveLogsBeenChecked(addresses []string, topic0 string) (bool, error) { + for _, address := range addresses { + var addressExists bool + getAddressExistsErr := repository.db.Get(&addressExists, `SELECT EXISTS(SELECT 1 FROM public.checked_logs WHERE contract_address = $1)`, address) + if getAddressExistsErr != nil { + return false, getAddressExistsErr + } + if !addressExists { + return false, nil + } + } + var topicZeroExists bool + getTopicZeroExistsErr := repository.db.Get(&topicZeroExists, `SELECT EXISTS(SELECT 1 FROM public.checked_logs WHERE topic_zero = $1)`, topic0) + if getTopicZeroExistsErr != nil { + return false, getTopicZeroExistsErr + } + return topicZeroExists, nil +} + +// Persist that a given address + topic0 is being fetched on this run of vDB
+func (repository CheckedLogsRepository) MarkLogsChecked(addresses []string, topic0 string) error { + tx, txErr := repository.db.Beginx() + if txErr != nil { + return txErr + } + for _, address := range addresses { + _, insertErr := tx.Exec(`INSERT INTO public.checked_logs (contract_address, topic_zero) VALUES ($1, $2)`, address, topic0) + if insertErr != nil { + rollbackErr := tx.Rollback() + if rollbackErr != nil { + logrus.Errorf("error rolling back transaction inserting checked logs: %s", rollbackErr.Error()) + } + return insertErr + } + } + return tx.Commit() +} diff --git a/pkg/datastore/postgres/repositories/checked_logs_repository_test.go b/pkg/datastore/postgres/repositories/checked_logs_repository_test.go new file mode 100644 index 00000000..351bbcaf --- /dev/null +++ b/pkg/datastore/postgres/repositories/checked_logs_repository_test.go @@ -0,0 +1,110 @@ +// VulcanizeDB +// Copyright © 2019 Vulcanize + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. + +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . + +package repositories_test + +import ( + "github.com/ethereum/go-ethereum/common" + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" + "github.com/vulcanize/vulcanizedb/pkg/datastore" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories" + "github.com/vulcanize/vulcanizedb/pkg/fakes" + "github.com/vulcanize/vulcanizedb/test_config" +) + +var _ = Describe("Checked logs repository", func() { + var ( + db *postgres.DB + fakeAddress = fakes.FakeAddress.Hex() + fakeAddresses = []string{fakeAddress} + fakeTopicZero = fakes.FakeHash.Hex() + repository datastore.CheckedLogsRepository + ) + + BeforeEach(func() { + db = test_config.NewTestDB(test_config.NewTestNode()) + test_config.CleanTestDB(db) + repository = repositories.NewCheckedLogsRepository(db) + }) + + Describe("HaveLogsBeenChecked", func() { + It("returns true if all addresses and the topic0 are already present in the db", func() { + _, insertErr := db.Exec(`INSERT INTO public.checked_logs (contract_address, topic_zero) VALUES ($1, $2)`, fakeAddress, fakeTopicZero) + Expect(insertErr).NotTo(HaveOccurred()) + + hasBeenChecked, err := repository.HaveLogsBeenChecked(fakeAddresses, fakeTopicZero) + + Expect(err).NotTo(HaveOccurred()) + Expect(hasBeenChecked).To(BeTrue()) + }) + + It("returns true if addresses and topic0 were fetched because of a combination of other transformers", func() { + anotherFakeAddress := common.HexToAddress("0x" + fakes.RandomString(40)).Hex() + anotherFakeTopicZero := common.HexToHash("0x" + fakes.RandomString(64)).Hex() + // insert row with matching address but different topic0 + _, insertOneErr := db.Exec(`INSERT INTO public.checked_logs (contract_address, topic_zero) VALUES ($1, $2)`, fakeAddress, anotherFakeTopicZero) + Expect(insertOneErr).NotTo(HaveOccurred()) + // insert row with matching topic0 but different address + _, insertTwoErr := db.Exec(`INSERT INTO public.checked_logs (contract_address, topic_zero) VALUES ($1, $2)`, anotherFakeAddress, fakeTopicZero) + Expect(insertTwoErr).NotTo(HaveOccurred()) + + 
hasBeenChecked, err := repository.HaveLogsBeenChecked(fakeAddresses, fakeTopicZero) + + Expect(err).NotTo(HaveOccurred()) + Expect(hasBeenChecked).To(BeTrue()) + }) + + It("returns false if any address has not been checked", func() { + anotherFakeAddress := common.HexToAddress("0x" + fakes.RandomString(40)).Hex() + _, insertErr := db.Exec(`INSERT INTO public.checked_logs (contract_address, topic_zero) VALUES ($1, $2)`, fakeAddress, fakeTopicZero) + Expect(insertErr).NotTo(HaveOccurred()) + + hasBeenChecked, err := repository.HaveLogsBeenChecked(append(fakeAddresses, anotherFakeAddress), fakeTopicZero) + + Expect(err).NotTo(HaveOccurred()) + Expect(hasBeenChecked).To(BeFalse()) + }) + + It("returns false if topic0 has not been checked", func() { + anotherFakeTopicZero := common.HexToHash("0x" + fakes.RandomString(64)).Hex() + _, insertErr := db.Exec(`INSERT INTO public.checked_logs (contract_address, topic_zero) VALUES ($1, $2)`, fakeAddress, anotherFakeTopicZero) + Expect(insertErr).NotTo(HaveOccurred()) + + hasBeenChecked, err := repository.HaveLogsBeenChecked(fakeAddresses, fakeTopicZero) + + Expect(err).NotTo(HaveOccurred()) + Expect(hasBeenChecked).To(BeFalse()) + }) + }) + + Describe("MarkLogsChecked", func() { + It("adds a row for all of transformer's addresses + topic0", func() { + anotherFakeAddress := common.HexToAddress("0x" + fakes.RandomString(40)).Hex() + err := repository.MarkLogsChecked(append(fakeAddresses, anotherFakeAddress), fakeTopicZero) + + Expect(err).NotTo(HaveOccurred()) + var comboOneExists, comboTwoExists bool + getComboOneErr := db.Get(&comboOneExists, `SELECT EXISTS(SELECT 1 FROM public.checked_logs WHERE contract_address = $1 AND topic_zero = $2)`, fakeAddress, fakeTopicZero) + Expect(getComboOneErr).NotTo(HaveOccurred()) + Expect(comboOneExists).To(BeTrue()) + getComboTwoErr := db.Get(&comboTwoExists, `SELECT EXISTS(SELECT 1 FROM public.checked_logs WHERE contract_address = $1 AND topic_zero = $2)`, anotherFakeAddress, fakeTopicZero) 
+ Expect(getComboTwoErr).NotTo(HaveOccurred()) + Expect(comboTwoExists).To(BeTrue()) + }) + }) +}) diff --git a/pkg/datastore/repository.go b/pkg/datastore/repository.go index 2fbeaf9e..f762bb12 100644 --- a/pkg/datastore/repository.go +++ b/pkg/datastore/repository.go @@ -36,9 +36,15 @@ type BlockRepository interface { type CheckedHeadersRepository interface { MarkHeaderChecked(headerID int64) error + MarkHeadersUnchecked(startingBlockNumber int64) error MissingHeaders(startingBlockNumber, endingBlockNumber, checkCount int64) ([]core.Header, error) } +type CheckedLogsRepository interface { + HaveLogsBeenChecked(addresses []string, topic0 string) (bool, error) + MarkLogsChecked(addresses []string, topic0 string) error +} + type ContractRepository interface { CreateContract(contract core.Contract) error GetContract(contractHash string) (core.Contract, error) diff --git a/pkg/fakes/checked_logs_repository.go b/pkg/fakes/checked_logs_repository.go new file mode 100644 index 00000000..fca57e96 --- /dev/null +++ b/pkg/fakes/checked_logs_repository.go @@ -0,0 +1,39 @@ +// VulcanizeDB +// Copyright © 2019 Vulcanize + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. + +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . 
+ +package fakes + +type MockCheckedLogsRepository struct { + HasLogBeenCheckedAddresses []string + HasLogBeenCheckedError error + HasLogBeenCheckedReturn bool + HasLogBeenCheckedTopicZero string + MarkLogCheckedAddresses []string + MarkLogCheckedError error + MarkLogCheckedTopicZero string +} + +func (repository *MockCheckedLogsRepository) HaveLogsBeenChecked(addresses []string, topic0 string) (bool, error) { + repository.HasLogBeenCheckedAddresses = addresses + repository.HasLogBeenCheckedTopicZero = topic0 + return repository.HasLogBeenCheckedReturn, repository.HasLogBeenCheckedError +} + +func (repository *MockCheckedLogsRepository) MarkLogsChecked(addresses []string, topic0 string) error { + repository.MarkLogCheckedAddresses = addresses + repository.MarkLogCheckedTopicZero = topic0 + return repository.MarkLogCheckedError +} diff --git a/pkg/fakes/data.go b/pkg/fakes/data.go index cebcfb2a..3c6b7046 100644 --- a/pkg/fakes/data.go +++ b/pkg/fakes/data.go @@ -20,16 +20,16 @@ import ( "bytes" "encoding/json" "errors" - "strconv" - "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core/types" - "github.com/vulcanize/vulcanizedb/pkg/core" + "math/rand" + "strconv" + "time" ) var ( - FakeAddress = common.HexToAddress("0x1234567890abcdef") + FakeAddress = common.HexToAddress("0x" + RandomString(40)) FakeError = errors.New("failed") FakeHash = common.BytesToHash([]byte{1, 2, 3, 4, 5}) fakeTimestamp = int64(111111111) @@ -103,3 +103,15 @@ func GetFakeUncle(hash, reward string) core.Uncle { Timestamp: strconv.FormatInt(fakeTimestamp, 10), } } + +func RandomString(length int) string { + var seededRand = rand.New( + rand.NewSource(time.Now().UnixNano())) + charset := "abcdef1234567890" + b := make([]byte, length) + for i := range b { + b[i] = charset[seededRand.Intn(len(charset))] + } + + return string(b) +} diff --git a/pkg/fakes/mock_checked_headers_repository.go b/pkg/fakes/mock_checked_headers_repository.go index 4e574b07..687bf160 100644 --- 
a/pkg/fakes/mock_checked_headers_repository.go +++ b/pkg/fakes/mock_checked_headers_repository.go @@ -21,23 +21,32 @@ import ( ) type MockCheckedHeadersRepository struct { - CheckCount int64 - StartingBlockNumber int64 - EndingBlockNumber int64 - HeaderID int64 - ReturnHeaders []core.Header - MarkHeaderCheckedReturnError error - MissingHeadersReturnError error + MarkHeaderCheckedHeaderID int64 + MarkHeaderCheckedReturnError error + MarkHeadersUncheckedCalled bool + MarkHeadersUncheckedReturnError error + MarkHeadersUncheckedStartingBlockNumber int64 + MissingHeadersCheckCount int64 + MissingHeadersEndingBlockNumber int64 + MissingHeadersReturnError error + MissingHeadersReturnHeaders []core.Header + MissingHeadersStartingBlockNumber int64 +} + +func (repository *MockCheckedHeadersRepository) MarkHeadersUnchecked(startingBlockNumber int64) error { + repository.MarkHeadersUncheckedCalled = true + repository.MarkHeadersUncheckedStartingBlockNumber = startingBlockNumber + return repository.MarkHeadersUncheckedReturnError } func (repository *MockCheckedHeadersRepository) MarkHeaderChecked(headerID int64) error { - repository.HeaderID = headerID + repository.MarkHeaderCheckedHeaderID = headerID return repository.MarkHeaderCheckedReturnError } func (repository *MockCheckedHeadersRepository) MissingHeaders(startingBlockNumber, endingBlockNumber, checkCount int64) ([]core.Header, error) { - repository.StartingBlockNumber = startingBlockNumber - repository.EndingBlockNumber = endingBlockNumber - repository.CheckCount = checkCount - return repository.ReturnHeaders, repository.MissingHeadersReturnError + repository.MissingHeadersStartingBlockNumber = startingBlockNumber + repository.MissingHeadersEndingBlockNumber = endingBlockNumber + repository.MissingHeadersCheckCount = checkCount + return repository.MissingHeadersReturnHeaders, repository.MissingHeadersReturnError } diff --git a/test_config/test_config.go b/test_config/test_config.go index f9fb3975..f3dad64e 100644 --- 
a/test_config/test_config.go +++ b/test_config/test_config.go @@ -106,6 +106,7 @@ func CleanTestDB(db *postgres.DB) { db.MustExec("DELETE FROM addresses") db.MustExec("DELETE FROM blocks") db.MustExec("DELETE FROM checked_headers") + db.MustExec("DELETE FROM checked_logs") // can't delete from eth_nodes since this function is called after the required eth_node is persisted db.MustExec("DELETE FROM full_sync_logs") db.MustExec("DELETE FROM full_sync_receipts") From 222252f89aae53cf0903eae0f55f5d8c0c6202fd Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Mon, 26 Aug 2019 22:11:28 -0500 Subject: [PATCH 09/21] Remove extraneous migration formatting --- db/migrations/00001_create_blocks_table.sql | 35 +++++++------ ...02_create_full_sync_transactions_table.sql | 27 +++++----- .../00004_create_contracts_table.sql | 6 +-- db/migrations/00005_create_nodes_table.sql | 15 +++--- db/migrations/00006_add_node_fk_to_blocks.sql | 12 ++--- ...ock_number_unique_constraint_to_blocks.sql | 4 +- ..._tx_to_index_to_full_sync_transactions.sql | 2 +- ...x_from_index_to_full_sync_transactions.sql | 2 +- db/migrations/00017_create_log_filters.sql | 23 ++++----- ...update_log_filters_to_block_constraint.sql | 8 +-- db/migrations/00020_rename_node_table.sql | 51 ++++++++----------- .../00021_associate_receipts_with_blocks.sql | 36 ++++++------- ...022_add_eth_node_fingerprint_to_blocks.sql | 10 ++-- db/migrations/00023_create_headers_table.sql | 17 +++---- db/migrations/00025_create_queued_storage.sql | 17 +++---- ..._create_header_sync_transactions_table.sql | 29 +++++------ db/migrations/00028_create_uncles_table.sql | 23 ++++----- 17 files changed, 150 insertions(+), 167 deletions(-) diff --git a/db/migrations/00001_create_blocks_table.sql b/db/migrations/00001_create_blocks_table.sql index fb85c820..c11cf1de 100644 --- a/db/migrations/00001_create_blocks_table.sql +++ b/db/migrations/00001_create_blocks_table.sql @@ -1,22 +1,21 @@ -- +goose Up -CREATE TABLE public.blocks -( - id 
SERIAL PRIMARY KEY, - difficulty BIGINT, - extra_data VARCHAR, - gas_limit BIGINT, - gas_used BIGINT, - hash VARCHAR(66), - miner VARCHAR(42), - nonce VARCHAR(20), - "number" BIGINT, - parent_hash VARCHAR(66), - reward NUMERIC, - uncles_reward NUMERIC, - "size" VARCHAR, - "time" BIGINT, - is_final BOOLEAN, - uncle_hash VARCHAR(66) +CREATE TABLE public.blocks ( + id SERIAL PRIMARY KEY, + difficulty BIGINT, + extra_data VARCHAR, + gas_limit BIGINT, + gas_used BIGINT, + hash VARCHAR(66), + miner VARCHAR(42), + nonce VARCHAR(20), + "number" BIGINT, + parent_hash VARCHAR(66), + reward NUMERIC, + uncles_reward NUMERIC, + "size" VARCHAR, + "time" BIGINT, + is_final BOOLEAN, + uncle_hash VARCHAR(66) ); diff --git a/db/migrations/00002_create_full_sync_transactions_table.sql b/db/migrations/00002_create_full_sync_transactions_table.sql index 84c3d433..72548c95 100644 --- a/db/migrations/00002_create_full_sync_transactions_table.sql +++ b/db/migrations/00002_create_full_sync_transactions_table.sql @@ -1,18 +1,17 @@ -- +goose Up -CREATE TABLE full_sync_transactions -( - id SERIAL PRIMARY KEY, - block_id INTEGER NOT NULL REFERENCES blocks (id) ON DELETE CASCADE, - gas_limit NUMERIC, - gas_price NUMERIC, - hash VARCHAR(66), - input_data BYTEA, - nonce NUMERIC, - raw BYTEA, - tx_from VARCHAR(66), - tx_index INTEGER, - tx_to VARCHAR(66), - "value" NUMERIC +CREATE TABLE full_sync_transactions ( + id SERIAL PRIMARY KEY, + block_id INTEGER NOT NULL REFERENCES blocks(id) ON DELETE CASCADE, + gas_limit NUMERIC, + gas_price NUMERIC, + hash VARCHAR(66), + input_data BYTEA, + nonce NUMERIC, + raw BYTEA, + tx_from VARCHAR(66), + tx_index INTEGER, + tx_to VARCHAR(66), + "value" NUMERIC ); -- +goose Down diff --git a/db/migrations/00004_create_contracts_table.sql b/db/migrations/00004_create_contracts_table.sql index 9d288a79..3ff43bef 100644 --- a/db/migrations/00004_create_contracts_table.sql +++ b/db/migrations/00004_create_contracts_table.sql @@ -1,9 +1,9 @@ -- +goose Up CREATE TABLE 
watched_contracts ( - contract_id SERIAL PRIMARY KEY, - contract_abi json, - contract_hash VARCHAR(66) UNIQUE + contract_id SERIAL PRIMARY KEY, + contract_abi json, + contract_hash VARCHAR(66) UNIQUE ); -- +goose Down diff --git a/db/migrations/00005_create_nodes_table.sql b/db/migrations/00005_create_nodes_table.sql index 4a38a934..c2c0c7b8 100644 --- a/db/migrations/00005_create_nodes_table.sql +++ b/db/migrations/00005_create_nodes_table.sql @@ -1,12 +1,11 @@ -- +goose Up -CREATE TABLE nodes -( - id SERIAL PRIMARY KEY, - client_name VARCHAR, - genesis_block VARCHAR(66), - network_id NUMERIC, - node_id VARCHAR(128), - CONSTRAINT node_uc UNIQUE (genesis_block, network_id, node_id) +CREATE TABLE nodes ( + id SERIAL PRIMARY KEY, + client_name VARCHAR, + genesis_block VARCHAR(66), + network_id NUMERIC, + node_id VARCHAR(128), + CONSTRAINT node_uc UNIQUE (genesis_block, network_id, node_id) ); -- +goose Down diff --git a/db/migrations/00006_add_node_fk_to_blocks.sql b/db/migrations/00006_add_node_fk_to_blocks.sql index dc8e0545..4db8c03f 100644 --- a/db/migrations/00006_add_node_fk_to_blocks.sql +++ b/db/migrations/00006_add_node_fk_to_blocks.sql @@ -1,11 +1,11 @@ -- +goose Up ALTER TABLE blocks - ADD COLUMN node_id INTEGER NOT NULL, - ADD CONSTRAINT node_fk - FOREIGN KEY (node_id) - REFERENCES nodes (id) - ON DELETE CASCADE; + ADD COLUMN node_id INTEGER NOT NULL, + ADD CONSTRAINT node_fk +FOREIGN KEY (node_id) +REFERENCES nodes (id) +ON DELETE CASCADE; -- +goose Down ALTER TABLE blocks - DROP COLUMN node_id; + DROP COLUMN node_id; diff --git a/db/migrations/00008_add_node_block_number_unique_constraint_to_blocks.sql b/db/migrations/00008_add_node_block_number_unique_constraint_to_blocks.sql index 40bbbb7f..30be138d 100644 --- a/db/migrations/00008_add_node_block_number_unique_constraint_to_blocks.sql +++ b/db/migrations/00008_add_node_block_number_unique_constraint_to_blocks.sql @@ -1,7 +1,7 @@ -- +goose Up ALTER TABLE blocks - ADD CONSTRAINT node_id_block_number_uc 
UNIQUE (number, node_id); + ADD CONSTRAINT node_id_block_number_uc UNIQUE (number, node_id); -- +goose Down ALTER TABLE blocks - DROP CONSTRAINT node_id_block_number_uc; + DROP CONSTRAINT node_id_block_number_uc; diff --git a/db/migrations/00011_add_tx_to_index_to_full_sync_transactions.sql b/db/migrations/00011_add_tx_to_index_to_full_sync_transactions.sql index 51d66c12..cf2977d1 100644 --- a/db/migrations/00011_add_tx_to_index_to_full_sync_transactions.sql +++ b/db/migrations/00011_add_tx_to_index_to_full_sync_transactions.sql @@ -1,5 +1,5 @@ -- +goose Up -CREATE INDEX tx_to_index ON full_sync_transactions (tx_to); +CREATE INDEX tx_to_index ON full_sync_transactions(tx_to); -- +goose Down DROP INDEX tx_to_index; diff --git a/db/migrations/00012_add_tx_from_index_to_full_sync_transactions.sql b/db/migrations/00012_add_tx_from_index_to_full_sync_transactions.sql index 8db62a3d..fa6f0543 100644 --- a/db/migrations/00012_add_tx_from_index_to_full_sync_transactions.sql +++ b/db/migrations/00012_add_tx_from_index_to_full_sync_transactions.sql @@ -1,5 +1,5 @@ -- +goose Up -CREATE INDEX tx_from_index ON full_sync_transactions (tx_from); +CREATE INDEX tx_from_index ON full_sync_transactions(tx_from); -- +goose Down DROP INDEX tx_from_index; diff --git a/db/migrations/00017_create_log_filters.sql b/db/migrations/00017_create_log_filters.sql index 28772b01..0367f5e5 100644 --- a/db/migrations/00017_create_log_filters.sql +++ b/db/migrations/00017_create_log_filters.sql @@ -1,16 +1,15 @@ -- +goose Up -CREATE TABLE log_filters -( - id SERIAL, - name VARCHAR NOT NULL CHECK (name <> ''), - from_block BIGINT CHECK (from_block >= 0), - to_block BIGINT CHECK (from_block >= 0), - address VARCHAR(66), - topic0 VARCHAR(66), - topic1 VARCHAR(66), - topic2 VARCHAR(66), - topic3 VARCHAR(66), - CONSTRAINT name_uc UNIQUE (name) +CREATE TABLE log_filters ( + id SERIAL, + name VARCHAR NOT NULL CHECK (name <> ''), + from_block BIGINT CHECK (from_block >= 0), + to_block BIGINT CHECK 
(from_block >= 0), + address VARCHAR(66), + topic0 VARCHAR(66), + topic1 VARCHAR(66), + topic2 VARCHAR(66), + topic3 VARCHAR(66), + CONSTRAINT name_uc UNIQUE (name) ); -- +goose Down diff --git a/db/migrations/00019_update_log_filters_to_block_constraint.sql b/db/migrations/00019_update_log_filters_to_block_constraint.sql index 2d43618c..512a44db 100644 --- a/db/migrations/00019_update_log_filters_to_block_constraint.sql +++ b/db/migrations/00019_update_log_filters_to_block_constraint.sql @@ -1,14 +1,14 @@ -- +goose Up ALTER TABLE log_filters - DROP CONSTRAINT log_filters_from_block_check1; + DROP CONSTRAINT log_filters_from_block_check1; ALTER TABLE log_filters - ADD CONSTRAINT log_filters_to_block_check CHECK (to_block >= 0); + ADD CONSTRAINT log_filters_to_block_check CHECK (to_block >= 0); -- +goose Down ALTER TABLE log_filters - DROP CONSTRAINT log_filters_to_block_check; + DROP CONSTRAINT log_filters_to_block_check; ALTER TABLE log_filters - ADD CONSTRAINT log_filters_from_block_check1 CHECK (to_block >= 0); + ADD CONSTRAINT log_filters_from_block_check1 CHECK (to_block >= 0); diff --git a/db/migrations/00020_rename_node_table.sql b/db/migrations/00020_rename_node_table.sql index c1592823..061fda57 100644 --- a/db/migrations/00020_rename_node_table.sql +++ b/db/migrations/00020_rename_node_table.sql @@ -1,52 +1,43 @@ -- +goose Up -ALTER TABLE public.nodes - RENAME TO eth_nodes; +ALTER TABLE public.nodes RENAME TO eth_nodes; +ALTER TABLE public.eth_nodes RENAME COLUMN node_id TO eth_node_id; + +ALTER TABLE public.eth_nodes DROP CONSTRAINT node_uc; ALTER TABLE public.eth_nodes - RENAME COLUMN node_id TO eth_node_id; + ADD CONSTRAINT eth_node_uc UNIQUE (genesis_block, network_id, eth_node_id); -ALTER TABLE public.eth_nodes - DROP CONSTRAINT node_uc; -ALTER TABLE public.eth_nodes - ADD CONSTRAINT eth_node_uc UNIQUE (genesis_block, network_id, eth_node_id); +ALTER TABLE public.blocks RENAME COLUMN node_id TO eth_node_id; +ALTER TABLE public.blocks DROP CONSTRAINT 
node_id_block_number_uc; ALTER TABLE public.blocks - RENAME COLUMN node_id TO eth_node_id; + ADD CONSTRAINT eth_node_id_block_number_uc UNIQUE (number, eth_node_id); +ALTER TABLE public.blocks DROP CONSTRAINT node_fk; ALTER TABLE public.blocks - DROP CONSTRAINT node_id_block_number_uc; -ALTER TABLE public.blocks - ADD CONSTRAINT eth_node_id_block_number_uc UNIQUE (number, eth_node_id); - -ALTER TABLE public.blocks - DROP CONSTRAINT node_fk; -ALTER TABLE public.blocks - ADD CONSTRAINT node_fk - FOREIGN KEY (eth_node_id) REFERENCES eth_nodes (id) ON DELETE CASCADE; + ADD CONSTRAINT node_fk +FOREIGN KEY (eth_node_id) REFERENCES eth_nodes (id) ON DELETE CASCADE; -- +goose Down ALTER TABLE public.eth_nodes - RENAME TO nodes; + RENAME TO nodes; ALTER TABLE public.nodes - RENAME COLUMN eth_node_id TO node_id; + RENAME COLUMN eth_node_id TO node_id; ALTER TABLE public.nodes - DROP CONSTRAINT eth_node_uc; + DROP CONSTRAINT eth_node_uc; ALTER TABLE public.nodes - ADD CONSTRAINT node_uc UNIQUE (genesis_block, network_id, node_id); + ADD CONSTRAINT node_uc UNIQUE (genesis_block, network_id, node_id); -ALTER TABLE public.blocks - RENAME COLUMN eth_node_id TO node_id; +ALTER TABLE public.blocks RENAME COLUMN eth_node_id TO node_id; +ALTER TABLE public.blocks DROP CONSTRAINT eth_node_id_block_number_uc; ALTER TABLE public.blocks - DROP CONSTRAINT eth_node_id_block_number_uc; -ALTER TABLE public.blocks - ADD CONSTRAINT node_id_block_number_uc UNIQUE (number, node_id); + ADD CONSTRAINT node_id_block_number_uc UNIQUE (number, node_id); +ALTER TABLE public.blocks DROP CONSTRAINT node_fk; ALTER TABLE public.blocks - DROP CONSTRAINT node_fk; -ALTER TABLE public.blocks - ADD CONSTRAINT node_fk - FOREIGN KEY (node_id) REFERENCES nodes (id) ON DELETE CASCADE; + ADD CONSTRAINT node_fk +FOREIGN KEY (node_id) REFERENCES nodes (id) ON DELETE CASCADE; diff --git a/db/migrations/00021_associate_receipts_with_blocks.sql b/db/migrations/00021_associate_receipts_with_blocks.sql index 
d8d3934a..b60aa2d4 100644 --- a/db/migrations/00021_associate_receipts_with_blocks.sql +++ b/db/migrations/00021_associate_receipts_with_blocks.sql @@ -1,44 +1,44 @@ -- +goose Up ALTER TABLE full_sync_receipts - ADD COLUMN block_id INT; + ADD COLUMN block_id INT; UPDATE full_sync_receipts -SET block_id = ( + SET block_id = ( SELECT block_id FROM full_sync_transactions WHERE full_sync_transactions.id = full_sync_receipts.transaction_id -); + ); ALTER TABLE full_sync_receipts - ALTER COLUMN block_id SET NOT NULL; + ALTER COLUMN block_id SET NOT NULL; ALTER TABLE full_sync_receipts - ADD CONSTRAINT blocks_fk - FOREIGN KEY (block_id) - REFERENCES blocks (id) - ON DELETE CASCADE; + ADD CONSTRAINT blocks_fk +FOREIGN KEY (block_id) +REFERENCES blocks (id) +ON DELETE CASCADE; ALTER TABLE full_sync_receipts - DROP COLUMN transaction_id; + DROP COLUMN transaction_id; -- +goose Down ALTER TABLE full_sync_receipts - ADD COLUMN transaction_id INT; + ADD COLUMN transaction_id INT; CREATE INDEX transaction_id_index ON full_sync_receipts (transaction_id); UPDATE full_sync_receipts -SET transaction_id = ( + SET transaction_id = ( SELECT id FROM full_sync_transactions WHERE full_sync_transactions.hash = full_sync_receipts.tx_hash -); + ); ALTER TABLE full_sync_receipts - ALTER COLUMN transaction_id SET NOT NULL; + ALTER COLUMN transaction_id SET NOT NULL; ALTER TABLE full_sync_receipts - ADD CONSTRAINT transaction_fk - FOREIGN KEY (transaction_id) - REFERENCES full_sync_transactions (id) - ON DELETE CASCADE; + ADD CONSTRAINT transaction_fk +FOREIGN KEY (transaction_id) +REFERENCES full_sync_transactions (id) +ON DELETE CASCADE; ALTER TABLE full_sync_receipts - DROP COLUMN block_id; + DROP COLUMN block_id; diff --git a/db/migrations/00022_add_eth_node_fingerprint_to_blocks.sql b/db/migrations/00022_add_eth_node_fingerprint_to_blocks.sql index b295abc8..5c48c03e 100644 --- a/db/migrations/00022_add_eth_node_fingerprint_to_blocks.sql +++ 
b/db/migrations/00022_add_eth_node_fingerprint_to_blocks.sql @@ -1,16 +1,16 @@ -- +goose Up ALTER TABLE blocks - ADD COLUMN eth_node_fingerprint VARCHAR(128); + ADD COLUMN eth_node_fingerprint VARCHAR(128); UPDATE blocks -SET eth_node_fingerprint = ( + SET eth_node_fingerprint = ( SELECT eth_node_id FROM eth_nodes WHERE eth_nodes.id = blocks.eth_node_id -); + ); ALTER TABLE blocks - ALTER COLUMN eth_node_fingerprint SET NOT NULL; + ALTER COLUMN eth_node_fingerprint SET NOT NULL; -- +goose Down ALTER TABLE blocks - DROP COLUMN eth_node_fingerprint; + DROP COLUMN eth_node_fingerprint; diff --git a/db/migrations/00023_create_headers_table.sql b/db/migrations/00023_create_headers_table.sql index 2c8f30a1..925c202b 100644 --- a/db/migrations/00023_create_headers_table.sql +++ b/db/migrations/00023_create_headers_table.sql @@ -1,13 +1,12 @@ -- +goose Up -CREATE TABLE public.headers -( - id SERIAL PRIMARY KEY, - hash VARCHAR(66), - block_number BIGINT, - raw JSONB, - block_timestamp NUMERIC, - eth_node_id INTEGER NOT NULL REFERENCES eth_nodes (id) ON DELETE CASCADE, - eth_node_fingerprint VARCHAR(128) +CREATE TABLE public.headers ( + id SERIAL PRIMARY KEY, + hash VARCHAR(66), + block_number BIGINT, + raw JSONB, + block_timestamp NUMERIC, + eth_node_id INTEGER NOT NULL REFERENCES eth_nodes (id) ON DELETE CASCADE, + eth_node_fingerprint VARCHAR(128) ); -- Index is removed when table is diff --git a/db/migrations/00025_create_queued_storage.sql b/db/migrations/00025_create_queued_storage.sql index 71ab8886..79a3a548 100644 --- a/db/migrations/00025_create_queued_storage.sql +++ b/db/migrations/00025_create_queued_storage.sql @@ -1,13 +1,12 @@ -- +goose Up -CREATE TABLE public.queued_storage -( - id SERIAL PRIMARY KEY, - block_height BIGINT, - block_hash BYTEA, - contract BYTEA, - storage_key BYTEA, - storage_value BYTEA, - UNIQUE (block_height, block_hash, contract, storage_key, storage_value) +CREATE TABLE public.queued_storage ( + id SERIAL PRIMARY KEY, + block_height 
BIGINT, + block_hash BYTEA, + contract BYTEA, + storage_key BYTEA, + storage_value BYTEA, + UNIQUE (block_height, block_hash, contract, storage_key, storage_value) ); -- +goose Down diff --git a/db/migrations/00026_create_header_sync_transactions_table.sql b/db/migrations/00026_create_header_sync_transactions_table.sql index 358ce65c..469f8f85 100644 --- a/db/migrations/00026_create_header_sync_transactions_table.sql +++ b/db/migrations/00026_create_header_sync_transactions_table.sql @@ -1,19 +1,18 @@ -- +goose Up -CREATE TABLE header_sync_transactions -( - id SERIAL PRIMARY KEY, - header_id INTEGER NOT NULL REFERENCES headers (id) ON DELETE CASCADE, - hash VARCHAR(66), - gas_limit NUMERIC, - gas_price NUMERIC, - input_data BYTEA, - nonce NUMERIC, - raw BYTEA, - tx_from VARCHAR(44), - tx_index INTEGER, - tx_to VARCHAR(44), - "value" NUMERIC, - UNIQUE (header_id, hash) +CREATE TABLE header_sync_transactions ( + id SERIAL PRIMARY KEY, + header_id INTEGER NOT NULL REFERENCES headers(id) ON DELETE CASCADE, + hash VARCHAR(66), + gas_limit NUMERIC, + gas_price NUMERIC, + input_data BYTEA, + nonce NUMERIC, + raw BYTEA, + tx_from VARCHAR(44), + tx_index INTEGER, + tx_to VARCHAR(44), + "value" NUMERIC, + UNIQUE (header_id, hash) ); -- +goose Down diff --git a/db/migrations/00028_create_uncles_table.sql b/db/migrations/00028_create_uncles_table.sql index 703d50f2..9ec0ffa7 100644 --- a/db/migrations/00028_create_uncles_table.sql +++ b/db/migrations/00028_create_uncles_table.sql @@ -1,16 +1,15 @@ -- +goose Up -CREATE TABLE public.uncles -( - id SERIAL PRIMARY KEY, - hash VARCHAR(66) NOT NULL, - block_id INTEGER NOT NULL REFERENCES blocks (id) ON DELETE CASCADE, - reward NUMERIC NOT NULL, - miner VARCHAR(42) NOT NULL, - raw JSONB, - block_timestamp NUMERIC, - eth_node_id INTEGER NOT NULL REFERENCES eth_nodes (id) ON DELETE CASCADE, - eth_node_fingerprint VARCHAR(128), - UNIQUE (block_id, hash) +CREATE TABLE public.uncles ( + id SERIAL PRIMARY KEY, + hash VARCHAR(66) NOT NULL, 
+ block_id INTEGER NOT NULL REFERENCES blocks (id) ON DELETE CASCADE, + reward NUMERIC NOT NULL, + miner VARCHAR(42) NOT NULL, + raw JSONB, + block_timestamp NUMERIC, + eth_node_id INTEGER NOT NULL REFERENCES eth_nodes (id) ON DELETE CASCADE, + eth_node_fingerprint VARCHAR(128), + UNIQUE (block_id, hash) ); -- +goose Down From 5ac76eee74d0483ef06b8d5c079ebca44ee25f87 Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Tue, 27 Aug 2019 16:30:50 -0500 Subject: [PATCH 10/21] Make check_count a column on public.headers - Don't need to maintain it on public.checked_headers if we're not adding additional columns to that table --- db/migrations/00023_create_headers_table.sql | 18 +++---- .../00024_create_checked_headers_table.sql | 8 ++-- ...ql => 00030_create_checked_logs_table.sql} | 2 +- db/schema.sql | 4 +- libraries/shared/logs/extractor.go | 24 +++++----- .../checked_headers_repository.go | 30 ++++++------ .../checked_headers_repository_test.go | 48 +++++++++---------- pkg/datastore/repository.go | 2 +- pkg/fakes/mock_checked_headers_repository.go | 2 +- 9 files changed, 68 insertions(+), 70 deletions(-) rename db/migrations/{00029_create_checked_logs_table.sql => 00030_create_checked_logs_table.sql} (90%) diff --git a/db/migrations/00023_create_headers_table.sql b/db/migrations/00023_create_headers_table.sql index 925c202b..da539de8 100644 --- a/db/migrations/00023_create_headers_table.sql +++ b/db/migrations/00023_create_headers_table.sql @@ -1,12 +1,14 @@ -- +goose Up -CREATE TABLE public.headers ( - id SERIAL PRIMARY KEY, - hash VARCHAR(66), - block_number BIGINT, - raw JSONB, - block_timestamp NUMERIC, - eth_node_id INTEGER NOT NULL REFERENCES eth_nodes (id) ON DELETE CASCADE, - eth_node_fingerprint VARCHAR(128) +CREATE TABLE public.headers +( + id SERIAL PRIMARY KEY, + hash VARCHAR(66), + block_number BIGINT, + raw JSONB, + block_timestamp NUMERIC, + check_count INTEGER NOT NULL DEFAULT 0, + eth_node_id INTEGER NOT NULL REFERENCES eth_nodes (id) ON DELETE 
CASCADE, + eth_node_fingerprint VARCHAR(128) ); -- Index is removed when table is diff --git a/db/migrations/00024_create_checked_headers_table.sql b/db/migrations/00024_create_checked_headers_table.sql index 95cedf22..acf0fbdb 100644 --- a/db/migrations/00024_create_checked_headers_table.sql +++ b/db/migrations/00024_create_checked_headers_table.sql @@ -1,9 +1,7 @@ -- +goose Up -CREATE TABLE public.checked_headers -( - id SERIAL PRIMARY KEY, - header_id INTEGER UNIQUE NOT NULL REFERENCES headers (id) ON DELETE CASCADE, - check_count INTEGER NOT NULL DEFAULT 1 +CREATE TABLE public.checked_headers ( + id SERIAL PRIMARY KEY, + header_id INTEGER UNIQUE NOT NULL REFERENCES headers (id) ON DELETE CASCADE ); -- +goose Down diff --git a/db/migrations/00029_create_checked_logs_table.sql b/db/migrations/00030_create_checked_logs_table.sql similarity index 90% rename from db/migrations/00029_create_checked_logs_table.sql rename to db/migrations/00030_create_checked_logs_table.sql index 91445cd9..1a77560a 100644 --- a/db/migrations/00029_create_checked_logs_table.sql +++ b/db/migrations/00030_create_checked_logs_table.sql @@ -9,4 +9,4 @@ CREATE TABLE public.checked_logs -- +goose Down -- SQL in this section is executed when the migration is rolled back. 
-DROP TABLE public.checked_logs; \ No newline at end of file +DROP TABLE public.checked_logs; diff --git a/db/schema.sql b/db/schema.sql index 16791a83..f78a8d1c 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -131,8 +131,7 @@ ALTER SEQUENCE public.blocks_id_seq OWNED BY public.blocks.id; CREATE TABLE public.checked_headers ( id integer NOT NULL, - header_id integer NOT NULL, - check_count integer DEFAULT 1 NOT NULL + header_id integer NOT NULL ); @@ -456,6 +455,7 @@ CREATE TABLE public.headers ( block_number bigint, raw jsonb, block_timestamp numeric, + check_count integer DEFAULT 0 NOT NULL, eth_node_id integer NOT NULL, eth_node_fingerprint character varying(128) ); diff --git a/libraries/shared/logs/extractor.go b/libraries/shared/logs/extractor.go index aed39d02..3e4df88a 100644 --- a/libraries/shared/logs/extractor.go +++ b/libraries/shared/logs/extractor.go @@ -77,17 +77,17 @@ func (extractor LogExtractor) ExtractLogs(recheckHeaders constants.TransformerEx return ErrNoWatchedAddresses, noMissingHeadersFound } - missingHeaders, missingHeadersErr := extractor.CheckedHeadersRepository.MissingHeaders(*extractor.StartingBlock, -1, getCheckCount(recheckHeaders)) - if missingHeadersErr != nil { - logrus.Errorf("error fetching missing headers: %s", missingHeadersErr) - return missingHeadersErr, noMissingHeadersFound + uncheckedHeaders, uncheckedHeadersErr := extractor.CheckedHeadersRepository.UncheckedHeaders(*extractor.StartingBlock, -1, getCheckCount(recheckHeaders)) + if uncheckedHeadersErr != nil { + logrus.Errorf("error fetching missing headers: %s", uncheckedHeadersErr) + return uncheckedHeadersErr, noMissingHeadersFound } - if len(missingHeaders) < 1 { + if len(uncheckedHeaders) < 1 { return nil, noMissingHeadersFound } - for _, header := range missingHeaders { + for _, header := range uncheckedHeaders { logs, fetchLogsErr := extractor.Fetcher.FetchLogs(extractor.Addresses, extractor.Topics, header) if fetchLogsErr != nil { logError("error fetching logs for 
header: %s", fetchLogsErr, header) @@ -143,13 +143,13 @@ func (extractor *LogExtractor) updateCheckedHeaders(config transformer.EventTran return hasBeenCheckedErr } if !hasBeenChecked { - err := extractor.CheckedHeadersRepository.MarkHeadersUnchecked(config.StartingBlockNumber) - if err != nil { - return err + uncheckHeadersErr := extractor.CheckedHeadersRepository.MarkHeadersUnchecked(config.StartingBlockNumber) + if uncheckHeadersErr != nil { + return uncheckHeadersErr } - nextErr := extractor.CheckedLogsRepository.MarkLogsChecked(config.ContractAddresses, config.Topic) - if nextErr != nil { - return nextErr + markLogsCheckedErr := extractor.CheckedLogsRepository.MarkLogsChecked(config.ContractAddresses, config.Topic) + if markLogsCheckedErr != nil { + return markLogsCheckedErr } } return nil diff --git a/pkg/datastore/postgres/repositories/checked_headers_repository.go b/pkg/datastore/postgres/repositories/checked_headers_repository.go index 1e33e425..8d870461 100644 --- a/pkg/datastore/postgres/repositories/checked_headers_repository.go +++ b/pkg/datastore/postgres/repositories/checked_headers_repository.go @@ -22,9 +22,7 @@ import ( ) const ( - insertCheckedHeaderQuery = `INSERT INTO public.checked_headers (header_id) VALUES ($1) - ON CONFLICT (header_id) DO UPDATE - SET check_count = (SELECT check_count FROM public.checked_headers WHERE header_id = $1) + 1` + insertCheckedHeaderQuery = `UPDATE public.headers SET check_count = (SELECT check_count WHERE id = $1) + 1 WHERE id = $1` ) type CheckedHeadersRepository struct { @@ -43,30 +41,30 @@ func (repo CheckedHeadersRepository) MarkHeaderChecked(headerID int64) error { // Remove checked_headers rows with block number >= starting block number func (repo CheckedHeadersRepository) MarkHeadersUnchecked(startingBlockNumber int64) error { - _, err := repo.db.Exec(`DELETE FROM public.checked_headers WHERE header_id IN (SELECT id FROM public.headers WHERE block_number >= $1)`, startingBlockNumber) + _, err := 
repo.db.Exec(`UPDATE public.headers SET check_count = 0 WHERE block_number >= $1`, startingBlockNumber) return err } // Return header_id if not present in checked_headers or its check_count is < passed checkCount -func (repo CheckedHeadersRepository) MissingHeaders(startingBlockNumber, endingBlockNumber, checkCount int64) ([]core.Header, error) { +func (repo CheckedHeadersRepository) UncheckedHeaders(startingBlockNumber, endingBlockNumber, checkCount int64) ([]core.Header, error) { var result []core.Header var query string var err error if endingBlockNumber == -1 { - query = `SELECT headers.id, headers.block_number, headers.hash FROM headers - LEFT JOIN checked_headers on headers.id = header_id - WHERE (header_id ISNULL OR check_count < $2) - AND headers.block_number >= $1 - AND headers.eth_node_fingerprint = $3` + query = `SELECT id, block_number, hash + FROM headers + WHERE check_count < $2 + AND block_number >= $1 + AND eth_node_fingerprint = $3` err = repo.db.Select(&result, query, startingBlockNumber, checkCount, repo.db.Node.ID) } else { - query = `SELECT headers.id, headers.block_number, headers.hash FROM headers - LEFT JOIN checked_headers on headers.id = header_id - WHERE (header_id ISNULL OR check_count < $3) - AND headers.block_number >= $1 - AND headers.block_number <= $2 - AND headers.eth_node_fingerprint = $4` + query = `SELECT id, block_number, hash + FROM headers + WHERE check_count < $3 + AND block_number >= $1 + AND block_number <= $2 + AND eth_node_fingerprint = $4` err = repo.db.Select(&result, query, startingBlockNumber, endingBlockNumber, checkCount, repo.db.Node.ID) } diff --git a/pkg/datastore/postgres/repositories/checked_headers_repository_test.go b/pkg/datastore/postgres/repositories/checked_headers_repository_test.go index 5ee5780b..b2c9948a 100644 --- a/pkg/datastore/postgres/repositories/checked_headers_repository_test.go +++ b/pkg/datastore/postgres/repositories/checked_headers_repository_test.go @@ -50,7 +50,7 @@ var _ = 
Describe("Checked Headers repository", func() { Expect(err).NotTo(HaveOccurred()) var checkedCount int - fetchErr := db.Get(&checkedCount, `SELECT check_count FROM public.checked_headers WHERE header_id = $1`, headerID) + fetchErr := db.Get(&checkedCount, `SELECT check_count FROM public.headers WHERE id = $1`, headerID) Expect(fetchErr).NotTo(HaveOccurred()) Expect(checkedCount).To(Equal(1)) }) @@ -67,7 +67,7 @@ var _ = Describe("Checked Headers repository", func() { Expect(updateErr).NotTo(HaveOccurred()) var checkedCount int - fetchErr := db.Get(&checkedCount, `SELECT check_count FROM public.checked_headers WHERE header_id = $1`, headerID) + fetchErr := db.Get(&checkedCount, `SELECT check_count FROM public.headers WHERE id = $1`, headerID) Expect(fetchErr).NotTo(HaveOccurred()) Expect(checkedCount).To(Equal(2)) }) @@ -101,20 +101,20 @@ var _ = Describe("Checked Headers repository", func() { err := repo.MarkHeadersUnchecked(blockNumberTwo) Expect(err).NotTo(HaveOccurred()) - var headerOneChecked, headerTwoChecked, headerThreeChecked bool - getHeaderOneErr := db.Get(&headerOneChecked, `SELECT EXISTS(SELECT 1 FROM public.checked_headers WHERE header_id = $1)`, headerIdOne) + var headerOneCheckCount, headerTwoCheckCount, headerThreeCheckCount int + getHeaderOneErr := db.Get(&headerOneCheckCount, `SELECT check_count FROM public.headers WHERE id = $1`, headerIdOne) Expect(getHeaderOneErr).NotTo(HaveOccurred()) - Expect(headerOneChecked).To(BeTrue()) - getHeaderTwoErr := db.Get(&headerTwoChecked, `SELECT EXISTS(SELECT 1 FROM public.checked_headers WHERE header_id = $1)`, headerIdTwo) + Expect(headerOneCheckCount).To(Equal(1)) + getHeaderTwoErr := db.Get(&headerTwoCheckCount, `SELECT check_count FROM public.headers WHERE id = $1`, headerIdTwo) Expect(getHeaderTwoErr).NotTo(HaveOccurred()) - Expect(headerTwoChecked).To(BeFalse()) - getHeaderThreeErr := db.Get(&headerThreeChecked, `SELECT EXISTS(SELECT 1 FROM public.checked_headers WHERE header_id = $1)`, headerIdThree) + 
Expect(headerTwoCheckCount).To(BeZero()) + getHeaderThreeErr := db.Get(&headerThreeCheckCount, `SELECT check_count FROM public.headers WHERE id = $1`, headerIdThree) Expect(getHeaderThreeErr).NotTo(HaveOccurred()) - Expect(headerThreeChecked).To(BeFalse()) + Expect(headerThreeCheckCount).To(BeZero()) }) }) - Describe("MissingHeaders", func() { + Describe("UncheckedHeaders", func() { var ( headerRepository datastore.HeaderRepository startingBlockNumber int64 @@ -151,7 +151,7 @@ var _ = Describe("Checked Headers repository", func() { Describe("when ending block is specified", func() { It("excludes headers that are out of range", func() { - headers, err := repo.MissingHeaders(startingBlockNumber, endingBlockNumber, uncheckedCheckCount) + headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlockNumber, uncheckedCheckCount) Expect(err).NotTo(HaveOccurred()) // doesn't include outOfRangeBlockNumber @@ -162,10 +162,10 @@ var _ = Describe("Checked Headers repository", func() { }) It("excludes headers that have been checked more than the check count", func() { - _, err = db.Exec(`INSERT INTO public.checked_headers (header_id) VALUES ($1)`, headerIDs[1]) + _, err = db.Exec(`UPDATE public.headers SET check_count = 1 WHERE id = $1`, headerIDs[1]) Expect(err).NotTo(HaveOccurred()) - headers, err := repo.MissingHeaders(startingBlockNumber, endingBlockNumber, uncheckedCheckCount) + headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlockNumber, uncheckedCheckCount) Expect(err).NotTo(HaveOccurred()) // doesn't include middleBlockNumber @@ -175,10 +175,10 @@ var _ = Describe("Checked Headers repository", func() { }) It("does not exclude headers that have been checked less than the check count", func() { - _, err = db.Exec(`INSERT INTO public.checked_headers (header_id) VALUES ($1)`, headerIDs[1]) + _, err = db.Exec(`UPDATE public.headers SET check_count = 1 WHERE id = $1`, headerIDs[1]) Expect(err).NotTo(HaveOccurred()) - headers, err := 
repo.MissingHeaders(startingBlockNumber, endingBlockNumber, recheckCheckCount) + headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlockNumber, recheckCheckCount) Expect(err).NotTo(HaveOccurred()) Expect(len(headers)).To(Equal(3)) @@ -197,14 +197,14 @@ var _ = Describe("Checked Headers repository", func() { } Expect(err).NotTo(HaveOccurred()) - nodeOneMissingHeaders, err := repo.MissingHeaders(startingBlockNumber, endingBlockNumber, uncheckedCheckCount) + nodeOneMissingHeaders, err := repo.UncheckedHeaders(startingBlockNumber, endingBlockNumber, uncheckedCheckCount) Expect(err).NotTo(HaveOccurred()) Expect(len(nodeOneMissingHeaders)).To(Equal(3)) Expect(nodeOneMissingHeaders[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber))) Expect(nodeOneMissingHeaders[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber))) Expect(nodeOneMissingHeaders[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber))) - nodeTwoMissingHeaders, err := repoTwo.MissingHeaders(startingBlockNumber, endingBlockNumber+10, uncheckedCheckCount) + nodeTwoMissingHeaders, err := repoTwo.UncheckedHeaders(startingBlockNumber, endingBlockNumber+10, uncheckedCheckCount) Expect(err).NotTo(HaveOccurred()) Expect(len(nodeTwoMissingHeaders)).To(Equal(3)) Expect(nodeTwoMissingHeaders[0].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10))) @@ -217,7 +217,7 @@ var _ = Describe("Checked Headers repository", func() { var endingBlock = int64(-1) It("includes all non-checked headers when ending block is -1 ", func() { - headers, err := repo.MissingHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) + headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) Expect(err).NotTo(HaveOccurred()) Expect(len(headers)).To(Equal(4)) @@ -228,10 +228,10 @@ var _ = 
Describe("Checked Headers repository", func() { }) It("excludes headers that have been checked more than the check count", func() { - _, err = db.Exec(`INSERT INTO public.checked_headers (header_id) VALUES ($1)`, headerIDs[1]) + _, err = db.Exec(`UPDATE public.headers SET check_count = 1 WHERE id = $1`, headerIDs[1]) Expect(err).NotTo(HaveOccurred()) - headers, err := repo.MissingHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) + headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) Expect(err).NotTo(HaveOccurred()) // doesn't include middleBlockNumber @@ -242,10 +242,10 @@ var _ = Describe("Checked Headers repository", func() { }) It("does not exclude headers that have been checked less than the check count", func() { - _, err = db.Exec(`INSERT INTO public.checked_headers (header_id) VALUES ($1)`, headerIDs[1]) + _, err = db.Exec(`UPDATE public.headers SET check_count = 1 WHERE id = $1`, headerIDs[1]) Expect(err).NotTo(HaveOccurred()) - headers, err := repo.MissingHeaders(startingBlockNumber, endingBlock, recheckCheckCount) + headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlock, recheckCheckCount) Expect(err).NotTo(HaveOccurred()) Expect(len(headers)).To(Equal(4)) @@ -265,7 +265,7 @@ var _ = Describe("Checked Headers repository", func() { } Expect(err).NotTo(HaveOccurred()) - nodeOneMissingHeaders, err := repo.MissingHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) + nodeOneMissingHeaders, err := repo.UncheckedHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) Expect(err).NotTo(HaveOccurred()) Expect(len(nodeOneMissingHeaders)).To(Equal(4)) Expect(nodeOneMissingHeaders[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) @@ -273,7 +273,7 @@ var _ = Describe("Checked Headers repository", func() { Expect(nodeOneMissingHeaders[2].BlockNumber).To(Or(Equal(startingBlockNumber), 
Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) Expect(nodeOneMissingHeaders[3].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) - nodeTwoMissingHeaders, err := repoTwo.MissingHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) + nodeTwoMissingHeaders, err := repoTwo.UncheckedHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) Expect(err).NotTo(HaveOccurred()) Expect(len(nodeTwoMissingHeaders)).To(Equal(4)) Expect(nodeTwoMissingHeaders[0].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10), Equal(outOfRangeBlockNumber+10))) diff --git a/pkg/datastore/repository.go b/pkg/datastore/repository.go index f762bb12..ff0f02dd 100644 --- a/pkg/datastore/repository.go +++ b/pkg/datastore/repository.go @@ -37,7 +37,7 @@ type BlockRepository interface { type CheckedHeadersRepository interface { MarkHeaderChecked(headerID int64) error MarkHeadersUnchecked(startingBlockNumber int64) error - MissingHeaders(startingBlockNumber, endingBlockNumber, checkCount int64) ([]core.Header, error) + UncheckedHeaders(startingBlockNumber, endingBlockNumber, checkCount int64) ([]core.Header, error) } type CheckedLogsRepository interface { diff --git a/pkg/fakes/mock_checked_headers_repository.go b/pkg/fakes/mock_checked_headers_repository.go index 687bf160..9781829c 100644 --- a/pkg/fakes/mock_checked_headers_repository.go +++ b/pkg/fakes/mock_checked_headers_repository.go @@ -44,7 +44,7 @@ func (repository *MockCheckedHeadersRepository) MarkHeaderChecked(headerID int64 return repository.MarkHeaderCheckedReturnError } -func (repository *MockCheckedHeadersRepository) MissingHeaders(startingBlockNumber, endingBlockNumber, checkCount int64) ([]core.Header, error) { +func (repository *MockCheckedHeadersRepository) UncheckedHeaders(startingBlockNumber, endingBlockNumber, checkCount int64) ([]core.Header, 
error) { repository.MissingHeadersStartingBlockNumber = startingBlockNumber repository.MissingHeadersEndingBlockNumber = endingBlockNumber repository.MissingHeadersCheckCount = checkCount From b9f3b9f9465988cf75ce8e4efffb8931f6342c2d Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Wed, 28 Aug 2019 10:41:34 -0500 Subject: [PATCH 11/21] Reference header sync logs address via foreign key --- .../00029_create_header_sync_logs_table.sql | 2 +- db/schema.sql | 10 ++++++- .../checked_headers_repository_test.go | 20 +++++++------- .../checked_logs_repository_test.go | 5 ++++ .../header_sync_log_repository.go | 26 ++++++++++++++----- .../header_sync_log_repository_test.go | 25 +++++++++++++----- 6 files changed, 63 insertions(+), 25 deletions(-) diff --git a/db/migrations/00029_create_header_sync_logs_table.sql b/db/migrations/00029_create_header_sync_logs_table.sql index 83313ca7..4a6ec73b 100644 --- a/db/migrations/00029_create_header_sync_logs_table.sql +++ b/db/migrations/00029_create_header_sync_logs_table.sql @@ -4,7 +4,7 @@ CREATE TABLE header_sync_logs ( id SERIAL PRIMARY KEY, header_id INTEGER NOT NULL REFERENCES headers (id) ON DELETE CASCADE, - address VARCHAR(66), + address INTEGER NOT NULL REFERENCES addresses (id) ON DELETE CASCADE, topics BYTEA[], data BYTEA, block_number BIGINT, diff --git a/db/schema.sql b/db/schema.sql index f78a8d1c..0a5bb3a1 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -334,7 +334,7 @@ ALTER SEQUENCE public.goose_db_version_id_seq OWNED BY public.goose_db_version.i CREATE TABLE public.header_sync_logs ( id integer NOT NULL, header_id integer NOT NULL, - address character varying(66), + address integer NOT NULL, topics bytea[], data bytea, block_number bigint, @@ -1075,6 +1075,14 @@ ALTER TABLE ONLY public.full_sync_transactions ADD CONSTRAINT full_sync_transactions_block_id_fkey FOREIGN KEY (block_id) REFERENCES public.blocks(id) ON DELETE CASCADE; +-- +-- Name: header_sync_logs header_sync_logs_address_fkey; Type: FK CONSTRAINT; 
Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.header_sync_logs + ADD CONSTRAINT header_sync_logs_address_fkey FOREIGN KEY (address) REFERENCES public.addresses(id) ON DELETE CASCADE; + + -- -- Name: header_sync_logs header_sync_logs_header_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - -- diff --git a/pkg/datastore/postgres/repositories/checked_headers_repository_test.go b/pkg/datastore/postgres/repositories/checked_headers_repository_test.go index b2c9948a..613432b7 100644 --- a/pkg/datastore/postgres/repositories/checked_headers_repository_test.go +++ b/pkg/datastore/postgres/repositories/checked_headers_repository_test.go @@ -34,13 +34,18 @@ var _ = Describe("Checked Headers repository", func() { repo datastore.CheckedHeadersRepository ) - Describe("MarkHeaderChecked", func() { - BeforeEach(func() { - db = test_config.NewTestDB(test_config.NewTestNode()) - test_config.CleanTestDB(db) - repo = repositories.NewCheckedHeadersRepository(db) - }) + BeforeEach(func() { + db = test_config.NewTestDB(test_config.NewTestNode()) + test_config.CleanTestDB(db) + repo = repositories.NewCheckedHeadersRepository(db) + }) + AfterEach(func() { + closeErr := db.Close() + Expect(closeErr).NotTo(HaveOccurred()) + }) + + Describe("MarkHeaderChecked", func() { It("marks passed header as checked on insert", func() { headerRepository := repositories.NewHeaderRepository(db) headerID, headerErr := headerRepository.CreateOrUpdateHeader(fakes.FakeHeader) @@ -129,10 +134,7 @@ var _ = Describe("Checked Headers repository", func() { ) BeforeEach(func() { - db = test_config.NewTestDB(test_config.NewTestNode()) - test_config.CleanTestDB(db) headerRepository = repositories.NewHeaderRepository(db) - repo = repositories.NewCheckedHeadersRepository(db) startingBlockNumber = rand.Int63() middleBlockNumber = startingBlockNumber + 1 diff --git a/pkg/datastore/postgres/repositories/checked_logs_repository_test.go b/pkg/datastore/postgres/repositories/checked_logs_repository_test.go 
index 351bbcaf..8597d5fe 100644 --- a/pkg/datastore/postgres/repositories/checked_logs_repository_test.go +++ b/pkg/datastore/postgres/repositories/checked_logs_repository_test.go @@ -42,6 +42,11 @@ var _ = Describe("Checked logs repository", func() { repository = repositories.NewCheckedLogsRepository(db) }) + AfterEach(func() { + closeErr := db.Close() + Expect(closeErr).NotTo(HaveOccurred()) + }) + Describe("HaveLogsBeenChecked", func() { It("returns true if all addresses and the topic0 are already present in the db", func() { _, insertErr := db.Exec(`INSERT INTO public.checked_logs (contract_address, topic_zero) VALUES ($1, $2)`, fakeAddress, fakeTopicZero) diff --git a/pkg/datastore/postgres/repositories/header_sync_log_repository.go b/pkg/datastore/postgres/repositories/header_sync_log_repository.go index fef7e6b3..90aceab5 100644 --- a/pkg/datastore/postgres/repositories/header_sync_log_repository.go +++ b/pkg/datastore/postgres/repositories/header_sync_log_repository.go @@ -31,17 +31,21 @@ const insertHeaderSyncLogQuery = `INSERT INTO header_sync_logs VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) ON CONFLICT DO NOTHING` type HeaderSyncLogRepository struct { - db *postgres.DB + db *postgres.DB + addressRepository AddressRepository } func NewHeaderSyncLogRepository(db *postgres.DB) HeaderSyncLogRepository { - return HeaderSyncLogRepository{db: db} + return HeaderSyncLogRepository{ + db: db, + addressRepository: AddressRepository{}, + } } type headerSyncLog struct { ID int64 HeaderID int64 `db:"header_id"` - Address string + Address int64 Topics pq.ByteaArray Data []byte BlockNumber uint64 `db:"block_number"` @@ -70,8 +74,12 @@ func (repository HeaderSyncLogRepository) GetUntransformedHeaderSyncLogs() ([]co for _, topic := range rawLog.Topics { logTopics = append(logTopics, common.BytesToHash(topic)) } + address, addrErr := repository.addressRepository.GetAddressById(repository.db, rawLog.Address) + if addrErr != nil { + return nil, addrErr + } 
reconstructedLog := types.Log{ - Address: common.HexToAddress(rawLog.Address), + Address: common.HexToAddress(address), Topics: logTopics, Data: rawLog.Data, BlockNumber: rawLog.BlockNumber, @@ -102,7 +110,7 @@ func (repository HeaderSyncLogRepository) CreateHeaderSyncLogs(headerID int64, l return txErr } for _, log := range logs { - err := insertLog(headerID, log, tx) + err := repository.insertLog(headerID, log, tx) if err != nil { rollbackErr := tx.Rollback() if rollbackErr != nil { @@ -114,13 +122,17 @@ func (repository HeaderSyncLogRepository) CreateHeaderSyncLogs(headerID int64, l return tx.Commit() } -func insertLog(headerID int64, log types.Log, tx *sqlx.Tx) error { +func (repository HeaderSyncLogRepository) insertLog(headerID int64, log types.Log, tx *sqlx.Tx) error { topics := buildTopics(log) raw, jsonErr := log.MarshalJSON() if jsonErr != nil { return jsonErr } - _, insertErr := tx.Exec(insertHeaderSyncLogQuery, headerID, log.Address.Hex(), topics, log.Data, log.BlockNumber, + addressID, addrErr := repository.addressRepository.GetOrCreateAddressInTransaction(tx, log.Address.Hex()) + if addrErr != nil { + return addrErr + } + _, insertErr := tx.Exec(insertHeaderSyncLogQuery, headerID, addressID, topics, log.Data, log.BlockNumber, log.BlockHash.Hex(), log.TxIndex, log.TxHash.Hex(), log.Index, raw) return insertErr } diff --git a/pkg/datastore/postgres/repositories/header_sync_log_repository_test.go b/pkg/datastore/postgres/repositories/header_sync_log_repository_test.go index 2891b6ba..5cd062a3 100644 --- a/pkg/datastore/postgres/repositories/header_sync_log_repository_test.go +++ b/pkg/datastore/postgres/repositories/header_sync_log_repository_test.go @@ -47,11 +47,16 @@ var _ = Describe("Header sync log repository", func() { repository = repositories.NewHeaderSyncLogRepository(db) }) + AfterEach(func() { + closeErr := db.Close() + Expect(closeErr).NotTo(HaveOccurred()) + }) + Describe("CreateHeaderSyncLogs", func() { - type HeaderSyncLog struct { + type 
headerSyncLog struct { ID int64 HeaderID int64 `db:"header_id"` - Address string + Address int64 Topics pq.ByteaArray Data []byte BlockNumber uint64 `db:"block_number"` @@ -69,12 +74,15 @@ var _ = Describe("Header sync log repository", func() { err := repository.CreateHeaderSyncLogs(headerID, []types.Log{log}) Expect(err).NotTo(HaveOccurred()) - var dbLog HeaderSyncLog + var dbLog headerSyncLog lookupErr := db.Get(&dbLog, `SELECT * FROM header_sync_logs`) Expect(lookupErr).NotTo(HaveOccurred()) Expect(dbLog.ID).NotTo(BeZero()) Expect(dbLog.HeaderID).To(Equal(headerID)) - Expect(dbLog.Address).To(Equal(log.Address.Hex())) + addressRepository := repositories.AddressRepository{} + actualAddress, addressErr := addressRepository.GetAddressById(db, dbLog.Address) + Expect(addressErr).NotTo(HaveOccurred()) + Expect(actualAddress).To(Equal(log.Address.Hex())) Expect(dbLog.Topics[0]).To(Equal(log.Topics[0].Bytes())) Expect(dbLog.Topics[1]).To(Equal(log.Topics[1].Bytes())) Expect(dbLog.Data).To(Equal(log.Data)) @@ -111,7 +119,7 @@ var _ = Describe("Header sync log repository", func() { err := repository.CreateHeaderSyncLogs(headerID, []types.Log{log}) Expect(err).NotTo(HaveOccurred()) - var dbLog HeaderSyncLog + var dbLog headerSyncLog lookupErr := db.Get(&dbLog, `SELECT * FROM header_sync_logs`) Expect(lookupErr).NotTo(HaveOccurred()) @@ -120,8 +128,11 @@ var _ = Describe("Header sync log repository", func() { logTopics = append(logTopics, common.BytesToHash(topic)) } + addressRepository := repositories.AddressRepository{} + actualAddress, addressErr := addressRepository.GetAddressById(db, dbLog.Address) + Expect(addressErr).NotTo(HaveOccurred()) reconstructedLog := types.Log{ - Address: common.HexToAddress(dbLog.Address), + Address: common.HexToAddress(actualAddress), Topics: logTopics, Data: dbLog.Data, BlockNumber: dbLog.BlockNumber, @@ -147,7 +158,7 @@ var _ = Describe("Header sync log repository", func() { }) }) - Describe("GetFullSyncLogs", func() { + 
Describe("GetUntransformedHeaderSyncLogs", func() { Describe("when there are no logs", func() { It("returns empty collection", func() { result, err := repository.GetUntransformedHeaderSyncLogs() From ce91b0d9e61b1ddf1af51cc9806a8218c124ec1b Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Tue, 10 Sep 2019 13:47:43 -0500 Subject: [PATCH 12/21] Simplify checked header repository tests - Use assertions instead of comments to document expectations - Also randomize the fake timestamp in test data --- .../repositories/block_repository_test.go | 33 +++---- .../checked_headers_repository_test.go | 91 ++++++++----------- .../full_sync_log_repository_test.go | 3 +- pkg/fakes/data.go | 2 +- 4 files changed, 56 insertions(+), 73 deletions(-) diff --git a/pkg/datastore/postgres/repositories/block_repository_test.go b/pkg/datastore/postgres/repositories/block_repository_test.go index a3850190..c0542066 100644 --- a/pkg/datastore/postgres/repositories/block_repository_test.go +++ b/pkg/datastore/postgres/repositories/block_repository_test.go @@ -159,11 +159,12 @@ var _ = Describe("Saving blocks", func() { }) It("saves one uncle associated to the block", func() { + fakeUncle := fakes.GetFakeUncle(common.BytesToHash([]byte{1, 2, 3}).String(), "100000") block := core.Block{ Hash: fakes.FakeHash.String(), Number: 123, Transactions: []core.TransactionModel{fakes.FakeTransaction}, - Uncles: []core.Uncle{fakes.GetFakeUncle(common.BytesToHash([]byte{1, 2, 3}).String(), "100000")}, + Uncles: []core.Uncle{fakeUncle}, UnclesReward: "156250000000000000", } @@ -179,20 +180,20 @@ var _ = Describe("Saving blocks", func() { err := db.Get(&uncleModel, `SELECT hash, reward, miner, raw, block_timestamp FROM uncles WHERE block_id = $1 AND hash = $2`, id, common.BytesToHash([]byte{1, 2, 3}).Hex()) Expect(err).ToNot(HaveOccurred()) - Expect(uncleModel.Hash).To(Equal(common.BytesToHash([]byte{1, 2, 3}).Hex())) - Expect(uncleModel.Reward).To(Equal("100000")) - 
Expect(uncleModel.Miner).To(Equal(fakes.FakeAddress.Hex())) - Expect(uncleModel.Timestamp).To(Equal("111111111")) + Expect(uncleModel.Hash).To(Equal(fakeUncle.Hash)) + Expect(uncleModel.Reward).To(Equal(fakeUncle.Reward)) + Expect(uncleModel.Miner).To(Equal(fakeUncle.Miner)) + Expect(uncleModel.Timestamp).To(Equal(fakeUncle.Timestamp)) }) It("saves two uncles associated to the block", func() { + fakeUncleOne := fakes.GetFakeUncle(common.BytesToHash([]byte{1, 2, 3}).String(), "100000") + fakeUncleTwo := fakes.GetFakeUncle(common.BytesToHash([]byte{3, 2, 1}).String(), "90000") block := core.Block{ Hash: fakes.FakeHash.String(), Number: 123, Transactions: []core.TransactionModel{fakes.FakeTransaction}, - Uncles: []core.Uncle{ - fakes.GetFakeUncle(common.BytesToHash([]byte{1, 2, 3}).String(), "100000"), - fakes.GetFakeUncle(common.BytesToHash([]byte{3, 2, 1}).String(), "90000")}, + Uncles: []core.Uncle{fakeUncleOne, fakeUncleTwo}, UnclesReward: "312500000000000000", } @@ -210,18 +211,18 @@ var _ = Describe("Saving blocks", func() { err := db.Get(&uncleModel, `SELECT hash, reward, miner, raw, block_timestamp FROM uncles WHERE block_id = $1 AND hash = $2`, id, common.BytesToHash([]byte{1, 2, 3}).Hex()) Expect(err).ToNot(HaveOccurred()) - Expect(uncleModel.Hash).To(Equal(common.BytesToHash([]byte{1, 2, 3}).Hex())) - Expect(uncleModel.Reward).To(Equal("100000")) - Expect(uncleModel.Miner).To(Equal(fakes.FakeAddress.Hex())) - Expect(uncleModel.Timestamp).To(Equal("111111111")) + Expect(uncleModel.Hash).To(Equal(fakeUncleOne.Hash)) + Expect(uncleModel.Reward).To(Equal(fakeUncleOne.Reward)) + Expect(uncleModel.Miner).To(Equal(fakeUncleOne.Miner)) + Expect(uncleModel.Timestamp).To(Equal(fakeUncleOne.Timestamp)) err = db.Get(&uncleModel, `SELECT hash, reward, miner, raw, block_timestamp FROM uncles WHERE block_id = $1 AND hash = $2`, id, common.BytesToHash([]byte{3, 2, 1}).Hex()) Expect(err).ToNot(HaveOccurred()) - Expect(uncleModel.Hash).To(Equal(common.BytesToHash([]byte{3, 
2, 1}).Hex())) - Expect(uncleModel.Reward).To(Equal("90000")) - Expect(uncleModel.Miner).To(Equal(fakes.FakeAddress.Hex())) - Expect(uncleModel.Timestamp).To(Equal("111111111")) + Expect(uncleModel.Hash).To(Equal(fakeUncleTwo.Hash)) + Expect(uncleModel.Reward).To(Equal(fakeUncleTwo.Reward)) + Expect(uncleModel.Miner).To(Equal(fakeUncleTwo.Miner)) + Expect(uncleModel.Timestamp).To(Equal(fakeUncleTwo.Timestamp)) }) It(`replaces blocks and transactions associated to the block diff --git a/pkg/datastore/postgres/repositories/checked_headers_repository_test.go b/pkg/datastore/postgres/repositories/checked_headers_repository_test.go index 613432b7..cef5978c 100644 --- a/pkg/datastore/postgres/repositories/checked_headers_repository_test.go +++ b/pkg/datastore/postgres/repositories/checked_headers_repository_test.go @@ -154,13 +154,11 @@ var _ = Describe("Checked Headers repository", func() { Describe("when ending block is specified", func() { It("excludes headers that are out of range", func() { headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlockNumber, uncheckedCheckCount) - Expect(err).NotTo(HaveOccurred()) - // doesn't include outOfRangeBlockNumber - Expect(len(headers)).To(Equal(3)) - Expect(headers[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(middleBlockNumber))) - Expect(headers[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(middleBlockNumber))) - Expect(headers[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(middleBlockNumber))) + + headerBlockNumbers := getBlockNumbers(headers) + Expect(headerBlockNumbers).To(ConsistOf(startingBlockNumber, middleBlockNumber, endingBlockNumber)) + Expect(headerBlockNumbers).NotTo(ContainElement(outOfRangeBlockNumber)) }) It("excludes headers that have been checked more than the check count", func() { @@ -168,12 +166,11 @@ var _ = Describe("Checked Headers repository", func() { 
Expect(err).NotTo(HaveOccurred()) headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlockNumber, uncheckedCheckCount) - Expect(err).NotTo(HaveOccurred()) - // doesn't include middleBlockNumber - Expect(len(headers)).To(Equal(2)) - Expect(headers[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber))) - Expect(headers[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber))) + + headerBlockNumbers := getBlockNumbers(headers) + Expect(headerBlockNumbers).To(ConsistOf(startingBlockNumber, endingBlockNumber)) + Expect(headerBlockNumbers).NotTo(ContainElement(middleBlockNumber)) }) It("does not exclude headers that have been checked less than the check count", func() { @@ -181,12 +178,10 @@ var _ = Describe("Checked Headers repository", func() { Expect(err).NotTo(HaveOccurred()) headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlockNumber, recheckCheckCount) - Expect(err).NotTo(HaveOccurred()) - Expect(len(headers)).To(Equal(3)) - Expect(headers[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber))) - Expect(headers[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber))) - Expect(headers[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber))) + + headerBlockNumbers := getBlockNumbers(headers) + Expect(headerBlockNumbers).To(ConsistOf(startingBlockNumber, middleBlockNumber, endingBlockNumber)) }) It("only returns headers associated with the current node", func() { @@ -198,20 +193,15 @@ var _ = Describe("Checked Headers repository", func() { Expect(err).NotTo(HaveOccurred()) } - Expect(err).NotTo(HaveOccurred()) nodeOneMissingHeaders, err := repo.UncheckedHeaders(startingBlockNumber, endingBlockNumber, uncheckedCheckCount) Expect(err).NotTo(HaveOccurred()) - Expect(len(nodeOneMissingHeaders)).To(Equal(3)) - 
Expect(nodeOneMissingHeaders[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber))) - Expect(nodeOneMissingHeaders[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber))) - Expect(nodeOneMissingHeaders[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber))) + nodeOneHeaderBlockNumbers := getBlockNumbers(nodeOneMissingHeaders) + Expect(nodeOneHeaderBlockNumbers).To(ConsistOf(startingBlockNumber, middleBlockNumber, endingBlockNumber)) nodeTwoMissingHeaders, err := repoTwo.UncheckedHeaders(startingBlockNumber, endingBlockNumber+10, uncheckedCheckCount) Expect(err).NotTo(HaveOccurred()) - Expect(len(nodeTwoMissingHeaders)).To(Equal(3)) - Expect(nodeTwoMissingHeaders[0].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10))) - Expect(nodeTwoMissingHeaders[1].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10))) - Expect(nodeTwoMissingHeaders[2].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10))) + nodeTwoHeaderBlockNumbers := getBlockNumbers(nodeTwoMissingHeaders) + Expect(nodeTwoHeaderBlockNumbers).To(ConsistOf(startingBlockNumber+10, middleBlockNumber+10, endingBlockNumber+10)) }) }) @@ -220,13 +210,10 @@ var _ = Describe("Checked Headers repository", func() { It("includes all non-checked headers when ending block is -1 ", func() { headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) - Expect(err).NotTo(HaveOccurred()) - Expect(len(headers)).To(Equal(4)) - Expect(headers[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(middleBlockNumber), Equal(outOfRangeBlockNumber))) - Expect(headers[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(middleBlockNumber), 
Equal(outOfRangeBlockNumber))) - Expect(headers[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(middleBlockNumber), Equal(outOfRangeBlockNumber))) - Expect(headers[3].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(middleBlockNumber), Equal(outOfRangeBlockNumber))) + + headerBlockNumbers := getBlockNumbers(headers) + Expect(headerBlockNumbers).To(ConsistOf(startingBlockNumber, middleBlockNumber, endingBlockNumber, outOfRangeBlockNumber)) }) It("excludes headers that have been checked more than the check count", func() { @@ -234,13 +221,11 @@ var _ = Describe("Checked Headers repository", func() { Expect(err).NotTo(HaveOccurred()) headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) - Expect(err).NotTo(HaveOccurred()) - // doesn't include middleBlockNumber - Expect(len(headers)).To(Equal(3)) - Expect(headers[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) - Expect(headers[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) - Expect(headers[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) + + headerBlockNumbers := getBlockNumbers(headers) + Expect(headerBlockNumbers).To(ConsistOf(startingBlockNumber, endingBlockNumber, outOfRangeBlockNumber)) + Expect(headerBlockNumbers).NotTo(ContainElement(middleBlockNumber)) }) It("does not exclude headers that have been checked less than the check count", func() { @@ -248,13 +233,10 @@ var _ = Describe("Checked Headers repository", func() { Expect(err).NotTo(HaveOccurred()) headers, err := repo.UncheckedHeaders(startingBlockNumber, endingBlock, recheckCheckCount) - Expect(err).NotTo(HaveOccurred()) - Expect(len(headers)).To(Equal(4)) - Expect(headers[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), 
Equal(outOfRangeBlockNumber))) - Expect(headers[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) - Expect(headers[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) - Expect(headers[3].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) + + headerBlockNumbers := getBlockNumbers(headers) + Expect(headerBlockNumbers).To(ConsistOf(startingBlockNumber, middleBlockNumber, endingBlockNumber, outOfRangeBlockNumber)) }) It("only returns headers associated with the current node", func() { @@ -266,23 +248,24 @@ var _ = Describe("Checked Headers repository", func() { Expect(err).NotTo(HaveOccurred()) } - Expect(err).NotTo(HaveOccurred()) nodeOneMissingHeaders, err := repo.UncheckedHeaders(startingBlockNumber, endingBlock, uncheckedCheckCount) Expect(err).NotTo(HaveOccurred()) - Expect(len(nodeOneMissingHeaders)).To(Equal(4)) - Expect(nodeOneMissingHeaders[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) - Expect(nodeOneMissingHeaders[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) - Expect(nodeOneMissingHeaders[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) - Expect(nodeOneMissingHeaders[3].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(middleBlockNumber), Equal(endingBlockNumber), Equal(outOfRangeBlockNumber))) + nodeOneBlockNumbers := getBlockNumbers(nodeOneMissingHeaders) + Expect(nodeOneBlockNumbers).To(ConsistOf(startingBlockNumber, middleBlockNumber, endingBlockNumber, outOfRangeBlockNumber)) nodeTwoMissingHeaders, err := repoTwo.UncheckedHeaders(startingBlockNumber, 
endingBlock, uncheckedCheckCount) Expect(err).NotTo(HaveOccurred()) - Expect(len(nodeTwoMissingHeaders)).To(Equal(4)) - Expect(nodeTwoMissingHeaders[0].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10), Equal(outOfRangeBlockNumber+10))) - Expect(nodeTwoMissingHeaders[1].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10), Equal(outOfRangeBlockNumber+10))) - Expect(nodeTwoMissingHeaders[2].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10), Equal(outOfRangeBlockNumber+10))) - Expect(nodeTwoMissingHeaders[3].BlockNumber).To(Or(Equal(startingBlockNumber+10), Equal(middleBlockNumber+10), Equal(endingBlockNumber+10), Equal(outOfRangeBlockNumber+10))) + nodeTwoBlockNumbers := getBlockNumbers(nodeTwoMissingHeaders) + Expect(nodeTwoBlockNumbers).To(ConsistOf(startingBlockNumber+10, middleBlockNumber+10, endingBlockNumber+10, outOfRangeBlockNumber+10)) }) }) }) }) + +func getBlockNumbers(headers []core.Header) []int64 { + var headerBlockNumbers []int64 + for _, header := range headers { + headerBlockNumbers = append(headerBlockNumbers, header.BlockNumber) + } + return headerBlockNumbers +} diff --git a/pkg/datastore/postgres/repositories/full_sync_log_repository_test.go b/pkg/datastore/postgres/repositories/full_sync_log_repository_test.go index 981c242a..508b442a 100644 --- a/pkg/datastore/postgres/repositories/full_sync_log_repository_test.go +++ b/pkg/datastore/postgres/repositories/full_sync_log_repository_test.go @@ -161,7 +161,6 @@ var _ = Describe("Full sync log Repository", func() { }) It("saves the logs attached to a receipt", func() { - logs := []core.FullSyncLog{{ Address: "0x8a4774fe82c63484afef97ca8d89a6ea5e21f973", BlockNumber: 4745407, @@ -215,7 +214,7 @@ var _ = Describe("Full sync log Repository", func() { Expect(err).NotTo(HaveOccurred()) expected := logs[1:] - 
Expect(retrievedLogs).To(Equal(expected)) + Expect(retrievedLogs).To(ConsistOf(expected)) }) }) }) diff --git a/pkg/fakes/data.go b/pkg/fakes/data.go index 3c6b7046..2865a29b 100644 --- a/pkg/fakes/data.go +++ b/pkg/fakes/data.go @@ -32,7 +32,7 @@ var ( FakeAddress = common.HexToAddress("0x" + RandomString(40)) FakeError = errors.New("failed") FakeHash = common.BytesToHash([]byte{1, 2, 3, 4, 5}) - fakeTimestamp = int64(111111111) + fakeTimestamp = rand.Int63() ) var rawFakeHeader, _ = json.Marshal(types.Header{}) From 3d6c973f6eb1e3219d7a146b3f526c629807258f Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Tue, 10 Sep 2019 13:58:58 -0500 Subject: [PATCH 13/21] Remove extraneous db constraint --- db/migrations/00007_create_full_sync_logs_table.sql | 3 +-- db/migrations/00016_add_receipts_fk_to_logs.sql | 6 ------ 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/db/migrations/00007_create_full_sync_logs_table.sql b/db/migrations/00007_create_full_sync_logs_table.sql index b2dc154d..67cc31bf 100644 --- a/db/migrations/00007_create_full_sync_logs_table.sql +++ b/db/migrations/00007_create_full_sync_logs_table.sql @@ -10,8 +10,7 @@ CREATE TABLE full_sync_logs topic1 VARCHAR(66), topic2 VARCHAR(66), topic3 VARCHAR(66), - data TEXT, - CONSTRAINT full_sync_log_uc UNIQUE (block_number, index) + data TEXT ); diff --git a/db/migrations/00016_add_receipts_fk_to_logs.sql b/db/migrations/00016_add_receipts_fk_to_logs.sql index b2729597..760cd504 100644 --- a/db/migrations/00016_add_receipts_fk_to_logs.sql +++ b/db/migrations/00016_add_receipts_fk_to_logs.sql @@ -1,7 +1,4 @@ -- +goose Up -ALTER TABLE full_sync_logs - DROP CONSTRAINT full_sync_log_uc; - ALTER TABLE full_sync_logs ADD COLUMN receipt_id INT; @@ -18,6 +15,3 @@ ALTER TABLE full_sync_logs ALTER TABLE full_sync_logs DROP COLUMN receipt_id; - -ALTER TABLE full_sync_logs - ADD CONSTRAINT full_sync_log_uc UNIQUE (block_number, index); From c568fedd89f4688c81a659573d1f80552a5d9bca Mon Sep 17 00:00:00 2001 
From: Rob Mulholand Date: Tue, 10 Sep 2019 14:20:47 -0500 Subject: [PATCH 14/21] Remove unused functions on mock event repository --- libraries/shared/mocks/event_repository.go | 55 ++-------------------- 1 file changed, 4 insertions(+), 51 deletions(-) diff --git a/libraries/shared/mocks/event_repository.go b/libraries/shared/mocks/event_repository.go index 8c3c1c36..d8d878a4 100644 --- a/libraries/shared/mocks/event_repository.go +++ b/libraries/shared/mocks/event_repository.go @@ -17,24 +17,14 @@ package mocks import ( - . "github.com/onsi/gomega" - - "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" ) type MockEventRepository struct { - createError error - markHeaderCheckedError error - MarkHeaderCheckedPassedHeaderIDs []int64 - missingHeaders []core.Header - allHeaders []core.Header - missingHeadersError error - PassedStartingBlockNumber int64 - PassedEndingBlockNumber int64 - PassedModels []interface{} - SetDbCalled bool - CreateCalledCounter int + createError error + PassedModels []interface{} + SetDbCalled bool + CreateCalledCounter int } func (repository *MockEventRepository) Create(models []interface{}) error { @@ -44,47 +34,10 @@ func (repository *MockEventRepository) Create(models []interface{}) error { return repository.createError } -func (repository *MockEventRepository) MarkHeaderChecked(headerID int64) error { - repository.MarkHeaderCheckedPassedHeaderIDs = append(repository.MarkHeaderCheckedPassedHeaderIDs, headerID) - return repository.markHeaderCheckedError -} - -func (repository *MockEventRepository) MissingHeaders(startingBlockNumber, endingBlockNumber int64) ([]core.Header, error) { - repository.PassedStartingBlockNumber = startingBlockNumber - repository.PassedEndingBlockNumber = endingBlockNumber - return repository.missingHeaders, repository.missingHeadersError -} - -func (repository *MockEventRepository) RecheckHeaders(startingBlockNumber, endingBlockNumber int64) ([]core.Header, error) 
{ - repository.PassedStartingBlockNumber = startingBlockNumber - repository.PassedEndingBlockNumber = endingBlockNumber - return repository.allHeaders, nil -} - func (repository *MockEventRepository) SetDB(db *postgres.DB) { repository.SetDbCalled = true } -func (repository *MockEventRepository) SetMissingHeadersError(e error) { - repository.missingHeadersError = e -} - -func (repository *MockEventRepository) SetAllHeaders(headers []core.Header) { - repository.allHeaders = headers -} - -func (repository *MockEventRepository) SetMissingHeaders(headers []core.Header) { - repository.missingHeaders = headers -} - -func (repository *MockEventRepository) SetMarkHeaderCheckedError(e error) { - repository.markHeaderCheckedError = e -} - func (repository *MockEventRepository) SetCreateError(e error) { repository.createError = e } - -func (repository *MockEventRepository) AssertMarkHeaderCheckedCalledWith(i int64) { - Expect(repository.MarkHeaderCheckedPassedHeaderIDs).To(ContainElement(i)) -} From 13d503b851d548b2b2f62addd33b7591cd0243cd Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Tue, 10 Sep 2019 14:39:45 -0500 Subject: [PATCH 15/21] Distinguish between missing and unchecked headers - Missing == not in DB - Unchecked == logs haven't been fetched --- cmd/execute.go | 2 +- libraries/shared/constants/checked_headers.go | 2 +- libraries/shared/logs/extractor.go | 22 ++--- libraries/shared/logs/extractor_test.go | 93 ++++++++++--------- libraries/shared/mocks/log_extractor.go | 4 +- libraries/shared/watcher/event_watcher.go | 4 +- .../shared/watcher/event_watcher_test.go | 32 +++---- pkg/fakes/mock_checked_headers_repository.go | 18 ++-- 8 files changed, 89 insertions(+), 88 deletions(-) diff --git a/cmd/execute.go b/cmd/execute.go index 1a71b297..72adf856 100644 --- a/cmd/execute.go +++ b/cmd/execute.go @@ -158,7 +158,7 @@ func watchEthEvents(w *watcher.EventWatcher, wg *syn.WaitGroup) { if recheckHeadersArg { recheck = constants.HeaderRecheck } else { - recheck = 
constants.HeaderMissing + recheck = constants.HeaderUnchecked } errs := make(chan error) go w.Execute(recheck, errs) diff --git a/libraries/shared/constants/checked_headers.go b/libraries/shared/constants/checked_headers.go index af207039..eea1214d 100644 --- a/libraries/shared/constants/checked_headers.go +++ b/libraries/shared/constants/checked_headers.go @@ -20,6 +20,6 @@ type TransformerExecution bool const ( HeaderRecheck TransformerExecution = true - HeaderMissing TransformerExecution = false + HeaderUnchecked TransformerExecution = false RecheckHeaderCap = int64(5) ) diff --git a/libraries/shared/logs/extractor.go b/libraries/shared/logs/extractor.go index 3e4df88a..d2452870 100644 --- a/libraries/shared/logs/extractor.go +++ b/libraries/shared/logs/extractor.go @@ -31,8 +31,8 @@ import ( var ErrNoWatchedAddresses = errors.New("no watched addresses configured in the log extractor") const ( - missingHeadersFound = true - noMissingHeadersFound = false + uncheckedHeadersFound = true + noUncheckedHeadersFound = false ) type ILogExtractor interface { @@ -74,47 +74,47 @@ func (extractor *LogExtractor) AddTransformerConfig(config transformer.EventTran func (extractor LogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution) (error, bool) { if len(extractor.Addresses) < 1 { logrus.Errorf("error extracting logs: %s", ErrNoWatchedAddresses.Error()) - return ErrNoWatchedAddresses, noMissingHeadersFound + return ErrNoWatchedAddresses, noUncheckedHeadersFound } uncheckedHeaders, uncheckedHeadersErr := extractor.CheckedHeadersRepository.UncheckedHeaders(*extractor.StartingBlock, -1, getCheckCount(recheckHeaders)) if uncheckedHeadersErr != nil { logrus.Errorf("error fetching missing headers: %s", uncheckedHeadersErr) - return uncheckedHeadersErr, noMissingHeadersFound + return uncheckedHeadersErr, noUncheckedHeadersFound } if len(uncheckedHeaders) < 1 { - return nil, noMissingHeadersFound + return nil, noUncheckedHeadersFound } for _, header := range 
uncheckedHeaders { logs, fetchLogsErr := extractor.Fetcher.FetchLogs(extractor.Addresses, extractor.Topics, header) if fetchLogsErr != nil { logError("error fetching logs for header: %s", fetchLogsErr, header) - return fetchLogsErr, missingHeadersFound + return fetchLogsErr, uncheckedHeadersFound } if len(logs) > 0 { transactionsSyncErr := extractor.Syncer.SyncTransactions(header.Id, logs) if transactionsSyncErr != nil { logError("error syncing transactions: %s", transactionsSyncErr, header) - return transactionsSyncErr, missingHeadersFound + return transactionsSyncErr, uncheckedHeadersFound } createLogsErr := extractor.LogRepository.CreateHeaderSyncLogs(header.Id, logs) if createLogsErr != nil { logError("error persisting logs: %s", createLogsErr, header) - return createLogsErr, missingHeadersFound + return createLogsErr, uncheckedHeadersFound } } markHeaderCheckedErr := extractor.CheckedHeadersRepository.MarkHeaderChecked(header.Id) if markHeaderCheckedErr != nil { logError("error marking header checked: %s", markHeaderCheckedErr, header) - return markHeaderCheckedErr, missingHeadersFound + return markHeaderCheckedErr, uncheckedHeadersFound } } - return nil, missingHeadersFound + return nil, uncheckedHeadersFound } func earlierStartingBlockNumber(transformerBlock, watcherBlock int64) bool { @@ -130,7 +130,7 @@ func logError(description string, err error, header core.Header) { } func getCheckCount(recheckHeaders constants.TransformerExecution) int64 { - if recheckHeaders == constants.HeaderMissing { + if recheckHeaders == constants.HeaderUnchecked { return 1 } else { return constants.RecheckHeaderCap diff --git a/libraries/shared/logs/extractor_test.go b/libraries/shared/logs/extractor_test.go index 1ce47cc7..2a3ce5e9 100644 --- a/libraries/shared/logs/extractor_test.go +++ b/libraries/shared/logs/extractor_test.go @@ -157,33 +157,33 @@ var _ = Describe("Log extractor", func() { Describe("ExtractLogs", func() { It("returns error if no watched addresses 
configured", func() { - err, _ := extractor.ExtractLogs(constants.HeaderMissing) + err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(logs.ErrNoWatchedAddresses)) }) - Describe("when checking missing headers", func() { - It("gets missing headers since configured starting block with check_count < 1", func() { + Describe("when checking unchecked headers", func() { + It("gets headers since configured starting block with check_count < 1", func() { mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} - mockCheckedHeadersRepository.MissingHeadersReturnHeaders = []core.Header{{}} + mockCheckedHeadersRepository.UncheckedHeadersReturnHeaders = []core.Header{{}} extractor.CheckedHeadersRepository = mockCheckedHeadersRepository startingBlockNumber := rand.Int63() extractor.AddTransformerConfig(getTransformerConfig(startingBlockNumber)) - err, _ := extractor.ExtractLogs(constants.HeaderMissing) + err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).NotTo(HaveOccurred()) - Expect(mockCheckedHeadersRepository.MissingHeadersStartingBlockNumber).To(Equal(startingBlockNumber)) - Expect(mockCheckedHeadersRepository.MissingHeadersEndingBlockNumber).To(Equal(int64(-1))) - Expect(mockCheckedHeadersRepository.MissingHeadersCheckCount).To(Equal(int64(1))) + Expect(mockCheckedHeadersRepository.UncheckedHeadersStartingBlockNumber).To(Equal(startingBlockNumber)) + Expect(mockCheckedHeadersRepository.UncheckedHeadersEndingBlockNumber).To(Equal(int64(-1))) + Expect(mockCheckedHeadersRepository.UncheckedHeadersCheckCount).To(Equal(int64(1))) }) }) Describe("when rechecking headers", func() { - It("gets missing headers since configured starting block with check_count < RecheckHeaderCap", func() { + It("gets headers since configured starting block with check_count < RecheckHeaderCap", func() { mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} - 
mockCheckedHeadersRepository.MissingHeadersReturnHeaders = []core.Header{{}} + mockCheckedHeadersRepository.UncheckedHeadersReturnHeaders = []core.Header{{}} extractor.CheckedHeadersRepository = mockCheckedHeadersRepository startingBlockNumber := rand.Int63() extractor.AddTransformerConfig(getTransformerConfig(startingBlockNumber)) @@ -191,60 +191,61 @@ var _ = Describe("Log extractor", func() { err, _ := extractor.ExtractLogs(constants.HeaderRecheck) Expect(err).NotTo(HaveOccurred()) - Expect(mockCheckedHeadersRepository.MissingHeadersStartingBlockNumber).To(Equal(startingBlockNumber)) - Expect(mockCheckedHeadersRepository.MissingHeadersEndingBlockNumber).To(Equal(int64(-1))) - Expect(mockCheckedHeadersRepository.MissingHeadersCheckCount).To(Equal(constants.RecheckHeaderCap)) + Expect(mockCheckedHeadersRepository.UncheckedHeadersStartingBlockNumber).To(Equal(startingBlockNumber)) + Expect(mockCheckedHeadersRepository.UncheckedHeadersEndingBlockNumber).To(Equal(int64(-1))) + Expect(mockCheckedHeadersRepository.UncheckedHeadersCheckCount).To(Equal(constants.RecheckHeaderCap)) }) }) - It("emits error if getting missing headers fails", func() { + It("emits error if getting unchecked headers fails", func() { addTransformerConfig(extractor) mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} - mockCheckedHeadersRepository.MissingHeadersReturnError = fakes.FakeError + mockCheckedHeadersRepository.UncheckedHeadersReturnError = fakes.FakeError extractor.CheckedHeadersRepository = mockCheckedHeadersRepository - err, _ := extractor.ExtractLogs(constants.HeaderMissing) + err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) }) - Describe("when no missing headers", func() { + Describe("when no unchecked headers", func() { It("does not fetch logs", func() { addTransformerConfig(extractor) mockLogFetcher := &mocks.MockLogFetcher{} extractor.Fetcher = mockLogFetcher - err, _ := 
extractor.ExtractLogs(constants.HeaderMissing) + err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).NotTo(HaveOccurred()) Expect(mockLogFetcher.FetchCalled).To(BeFalse()) }) - It("emits that no missing headers were found", func() { + It("emits that no unchecked headers were found", func() { addTransformerConfig(extractor) mockLogFetcher := &mocks.MockLogFetcher{} extractor.Fetcher = mockLogFetcher - _, missingHeadersFound := extractor.ExtractLogs(constants.HeaderMissing) + _, uncheckedHeadersFound := extractor.ExtractLogs(constants.HeaderUnchecked) - Expect(missingHeadersFound).To(BeFalse()) + Expect(uncheckedHeadersFound).To(BeFalse()) }) }) - Describe("when there are missing headers", func() { - It("fetches logs for missing headers", func() { - addMissingHeader(extractor) + Describe("when there are unchecked headers", func() { + It("fetches logs for unchecked headers", func() { + addUncheckedHeader(extractor) config := transformer.EventTransformerConfig{ ContractAddresses: []string{fakes.FakeAddress.Hex()}, Topic: fakes.FakeHash.Hex(), StartingBlockNumber: rand.Int63(), } - extractor.AddTransformerConfig(config) + addTransformerErr := extractor.AddTransformerConfig(config) + Expect(addTransformerErr).NotTo(HaveOccurred()) mockLogFetcher := &mocks.MockLogFetcher{} extractor.Fetcher = mockLogFetcher - err, _ := extractor.ExtractLogs(constants.HeaderMissing) + err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).NotTo(HaveOccurred()) Expect(mockLogFetcher.FetchCalled).To(BeTrue()) @@ -255,13 +256,13 @@ var _ = Describe("Log extractor", func() { }) It("returns error if fetching logs fails", func() { - addMissingHeader(extractor) + addUncheckedHeader(extractor) addTransformerConfig(extractor) mockLogFetcher := &mocks.MockLogFetcher{} mockLogFetcher.ReturnError = fakes.FakeError extractor.Fetcher = mockLogFetcher - err, _ := extractor.ExtractLogs(constants.HeaderMissing) + err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) 
Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) @@ -269,12 +270,12 @@ var _ = Describe("Log extractor", func() { Describe("when no fetched logs", func() { It("does not sync transactions", func() { - addMissingHeader(extractor) + addUncheckedHeader(extractor) addTransformerConfig(extractor) mockTransactionSyncer := &fakes.MockTransactionSyncer{} extractor.Syncer = mockTransactionSyncer - err, _ := extractor.ExtractLogs(constants.HeaderMissing) + err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).NotTo(HaveOccurred()) Expect(mockTransactionSyncer.SyncTransactionsCalled).To(BeFalse()) @@ -283,34 +284,34 @@ var _ = Describe("Log extractor", func() { Describe("when there are fetched logs", func() { It("syncs transactions", func() { - addMissingHeader(extractor) + addUncheckedHeader(extractor) addFetchedLog(extractor) addTransformerConfig(extractor) mockTransactionSyncer := &fakes.MockTransactionSyncer{} extractor.Syncer = mockTransactionSyncer - err, _ := extractor.ExtractLogs(constants.HeaderMissing) + err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).NotTo(HaveOccurred()) Expect(mockTransactionSyncer.SyncTransactionsCalled).To(BeTrue()) }) It("returns error if syncing transactions fails", func() { - addMissingHeader(extractor) + addUncheckedHeader(extractor) addFetchedLog(extractor) addTransformerConfig(extractor) mockTransactionSyncer := &fakes.MockTransactionSyncer{} mockTransactionSyncer.SyncTransactionsError = fakes.FakeError extractor.Syncer = mockTransactionSyncer - err, _ := extractor.ExtractLogs(constants.HeaderMissing) + err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) }) It("persists fetched logs", func() { - addMissingHeader(extractor) + addUncheckedHeader(extractor) addTransformerConfig(extractor) fakeLogs := []types.Log{{ Address: common.HexToAddress("0xA"), @@ -323,21 +324,21 @@ var _ = Describe("Log 
extractor", func() { mockLogRepository := &fakes.MockHeaderSyncLogRepository{} extractor.LogRepository = mockLogRepository - err, _ := extractor.ExtractLogs(constants.HeaderMissing) + err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).NotTo(HaveOccurred()) Expect(mockLogRepository.PassedLogs).To(Equal(fakeLogs)) }) It("returns error if persisting logs fails", func() { - addMissingHeader(extractor) + addUncheckedHeader(extractor) addFetchedLog(extractor) addTransformerConfig(extractor) mockLogRepository := &fakes.MockHeaderSyncLogRepository{} mockLogRepository.CreateError = fakes.FakeError extractor.LogRepository = mockLogRepository - err, _ := extractor.ExtractLogs(constants.HeaderMissing) + err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) @@ -349,10 +350,10 @@ var _ = Describe("Log extractor", func() { addTransformerConfig(extractor) mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} headerID := rand.Int63() - mockCheckedHeadersRepository.MissingHeadersReturnHeaders = []core.Header{{Id: headerID}} + mockCheckedHeadersRepository.UncheckedHeadersReturnHeaders = []core.Header{{Id: headerID}} extractor.CheckedHeadersRepository = mockCheckedHeadersRepository - err, _ := extractor.ExtractLogs(constants.HeaderMissing) + err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).NotTo(HaveOccurred()) Expect(mockCheckedHeadersRepository.MarkHeaderCheckedHeaderID).To(Equal(headerID)) @@ -362,21 +363,21 @@ var _ = Describe("Log extractor", func() { addFetchedLog(extractor) addTransformerConfig(extractor) mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} - mockCheckedHeadersRepository.MissingHeadersReturnHeaders = []core.Header{{Id: rand.Int63()}} + mockCheckedHeadersRepository.UncheckedHeadersReturnHeaders = []core.Header{{Id: rand.Int63()}} mockCheckedHeadersRepository.MarkHeaderCheckedReturnError = fakes.FakeError 
extractor.CheckedHeadersRepository = mockCheckedHeadersRepository - err, _ := extractor.ExtractLogs(constants.HeaderMissing) + err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) }) It("emits that missing headers were found", func() { - addMissingHeader(extractor) + addUncheckedHeader(extractor) addTransformerConfig(extractor) - err, missingHeadersFound := extractor.ExtractLogs(constants.HeaderMissing) + err, missingHeadersFound := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).NotTo(HaveOccurred()) Expect(missingHeadersFound).To(BeTrue()) @@ -394,9 +395,9 @@ func addTransformerConfig(extractor *logs.LogExtractor) { extractor.AddTransformerConfig(fakeConfig) } -func addMissingHeader(extractor *logs.LogExtractor) { +func addUncheckedHeader(extractor *logs.LogExtractor) { mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} - mockCheckedHeadersRepository.MissingHeadersReturnHeaders = []core.Header{{}} + mockCheckedHeadersRepository.UncheckedHeadersReturnHeaders = []core.Header{{}} extractor.CheckedHeadersRepository = mockCheckedHeadersRepository } diff --git a/libraries/shared/mocks/log_extractor.go b/libraries/shared/mocks/log_extractor.go index 54511f98..363d27b5 100644 --- a/libraries/shared/mocks/log_extractor.go +++ b/libraries/shared/mocks/log_extractor.go @@ -26,7 +26,7 @@ type MockLogExtractor struct { AddTransformerConfigError error ExtractLogsCount int ExtractLogsErrors []error - MissingHeadersExist []bool + UncheckedHeadersExist []bool } func (extractor *MockLogExtractor) AddTransformerConfig(config transformer.EventTransformerConfig) error { @@ -42,6 +42,6 @@ func (extractor *MockLogExtractor) ExtractLogs(recheckHeaders constants.Transfor return errorThisRun, false } var missingHeadersExist bool - missingHeadersExist, extractor.MissingHeadersExist = extractor.MissingHeadersExist[0], extractor.MissingHeadersExist[1:] + missingHeadersExist, 
extractor.UncheckedHeadersExist = extractor.UncheckedHeadersExist[0], extractor.UncheckedHeadersExist[1:] return nil, missingHeadersExist } diff --git a/libraries/shared/watcher/event_watcher.go b/libraries/shared/watcher/event_watcher.go index 04386c63..80b0a023 100644 --- a/libraries/shared/watcher/event_watcher.go +++ b/libraries/shared/watcher/event_watcher.go @@ -94,12 +94,12 @@ func (watcher *EventWatcher) Execute(recheckHeaders constants.TransformerExecuti } func (watcher *EventWatcher) extractLogs(recheckHeaders constants.TransformerExecution, errs chan error) { - err, missingHeadersFound := watcher.LogExtractor.ExtractLogs(recheckHeaders) + err, uncheckedHeadersFound := watcher.LogExtractor.ExtractLogs(recheckHeaders) if err != nil { errs <- err } - if missingHeadersFound { + if uncheckedHeadersFound { watcher.extractLogs(recheckHeaders, errs) } else { time.Sleep(NoNewDataPause) diff --git a/libraries/shared/watcher/event_watcher_test.go b/libraries/shared/watcher/event_watcher_test.go index be165396..08206d39 100644 --- a/libraries/shared/watcher/event_watcher_test.go +++ b/libraries/shared/watcher/event_watcher_test.go @@ -88,9 +88,9 @@ var _ = Describe("Event Watcher", func() { delegator.DelegateErrors = []error{nil} delegator.LogsFound = []bool{false} extractor.ExtractLogsErrors = []error{nil} - extractor.MissingHeadersExist = []bool{false} + extractor.UncheckedHeadersExist = []bool{false} - go eventWatcher.Execute(constants.HeaderMissing, errsChan) + go eventWatcher.Execute(constants.HeaderUnchecked, errsChan) Eventually(func() int { return extractor.ExtractLogsCount @@ -102,9 +102,9 @@ var _ = Describe("Event Watcher", func() { delegator.DelegateErrors = []error{nil} delegator.LogsFound = []bool{false} extractor.ExtractLogsErrors = []error{fakes.FakeError} - extractor.MissingHeadersExist = []bool{false} + extractor.UncheckedHeadersExist = []bool{false} - go eventWatcher.Execute(constants.HeaderMissing, errsChan) + go 
eventWatcher.Execute(constants.HeaderUnchecked, errsChan) Expect(<-errsChan).To(MatchError(fakes.FakeError)) close(done) @@ -114,9 +114,9 @@ var _ = Describe("Event Watcher", func() { delegator.DelegateErrors = []error{nil} delegator.LogsFound = []bool{false} extractor.ExtractLogsErrors = []error{nil, nil} - extractor.MissingHeadersExist = []bool{true, false} + extractor.UncheckedHeadersExist = []bool{true, false} - go eventWatcher.Execute(constants.HeaderMissing, errsChan) + go eventWatcher.Execute(constants.HeaderUnchecked, errsChan) Eventually(func() int { return extractor.ExtractLogsCount @@ -128,9 +128,9 @@ var _ = Describe("Event Watcher", func() { delegator.DelegateErrors = []error{nil} delegator.LogsFound = []bool{false} extractor.ExtractLogsErrors = []error{nil, fakes.FakeError} - extractor.MissingHeadersExist = []bool{true, false} + extractor.UncheckedHeadersExist = []bool{true, false} - go eventWatcher.Execute(constants.HeaderMissing, errsChan) + go eventWatcher.Execute(constants.HeaderUnchecked, errsChan) Expect(<-errsChan).To(MatchError(fakes.FakeError)) close(done) @@ -141,9 +141,9 @@ var _ = Describe("Event Watcher", func() { delegator.DelegateErrors = []error{nil} delegator.LogsFound = []bool{false} extractor.ExtractLogsErrors = []error{nil} - extractor.MissingHeadersExist = []bool{false} + extractor.UncheckedHeadersExist = []bool{false} - go eventWatcher.Execute(constants.HeaderMissing, errsChan) + go eventWatcher.Execute(constants.HeaderUnchecked, errsChan) Eventually(func() int { return delegator.DelegateCallCount @@ -155,9 +155,9 @@ var _ = Describe("Event Watcher", func() { delegator.LogsFound = []bool{false} delegator.DelegateErrors = []error{fakes.FakeError} extractor.ExtractLogsErrors = []error{nil} - extractor.MissingHeadersExist = []bool{false} + extractor.UncheckedHeadersExist = []bool{false} - go eventWatcher.Execute(constants.HeaderMissing, errsChan) + go eventWatcher.Execute(constants.HeaderUnchecked, errsChan) 
Expect(<-errsChan).To(MatchError(fakes.FakeError)) close(done) @@ -167,9 +167,9 @@ var _ = Describe("Event Watcher", func() { delegator.DelegateErrors = []error{nil, nil} delegator.LogsFound = []bool{true, false} extractor.ExtractLogsErrors = []error{nil} - extractor.MissingHeadersExist = []bool{false} + extractor.UncheckedHeadersExist = []bool{false} - go eventWatcher.Execute(constants.HeaderMissing, errsChan) + go eventWatcher.Execute(constants.HeaderUnchecked, errsChan) Eventually(func() int { return delegator.DelegateCallCount @@ -181,9 +181,9 @@ var _ = Describe("Event Watcher", func() { delegator.DelegateErrors = []error{nil, fakes.FakeError} delegator.LogsFound = []bool{true, false} extractor.ExtractLogsErrors = []error{nil} - extractor.MissingHeadersExist = []bool{false} + extractor.UncheckedHeadersExist = []bool{false} - go eventWatcher.Execute(constants.HeaderMissing, errsChan) + go eventWatcher.Execute(constants.HeaderUnchecked, errsChan) Expect(<-errsChan).To(MatchError(fakes.FakeError)) close(done) diff --git a/pkg/fakes/mock_checked_headers_repository.go b/pkg/fakes/mock_checked_headers_repository.go index 9781829c..4ef6b9c7 100644 --- a/pkg/fakes/mock_checked_headers_repository.go +++ b/pkg/fakes/mock_checked_headers_repository.go @@ -26,11 +26,11 @@ type MockCheckedHeadersRepository struct { MarkHeadersUncheckedCalled bool MarkHeadersUncheckedReturnError error MarkHeadersUncheckedStartingBlockNumber int64 - MissingHeadersCheckCount int64 - MissingHeadersEndingBlockNumber int64 - MissingHeadersReturnError error - MissingHeadersReturnHeaders []core.Header - MissingHeadersStartingBlockNumber int64 + UncheckedHeadersCheckCount int64 + UncheckedHeadersEndingBlockNumber int64 + UncheckedHeadersReturnError error + UncheckedHeadersReturnHeaders []core.Header + UncheckedHeadersStartingBlockNumber int64 } func (repository *MockCheckedHeadersRepository) MarkHeadersUnchecked(startingBlockNumber int64) error { @@ -45,8 +45,8 @@ func (repository 
*MockCheckedHeadersRepository) MarkHeaderChecked(headerID int64 } func (repository *MockCheckedHeadersRepository) UncheckedHeaders(startingBlockNumber, endingBlockNumber, checkCount int64) ([]core.Header, error) { - repository.MissingHeadersStartingBlockNumber = startingBlockNumber - repository.MissingHeadersEndingBlockNumber = endingBlockNumber - repository.MissingHeadersCheckCount = checkCount - return repository.MissingHeadersReturnHeaders, repository.MissingHeadersReturnError + repository.UncheckedHeadersStartingBlockNumber = startingBlockNumber + repository.UncheckedHeadersEndingBlockNumber = endingBlockNumber + repository.UncheckedHeadersCheckCount = checkCount + return repository.UncheckedHeadersReturnHeaders, repository.UncheckedHeadersReturnError } From 3f9b034c4c1a43cb2fb88ae82c8921685aeae31b Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Tue, 10 Sep 2019 21:22:14 -0500 Subject: [PATCH 16/21] Rename checked_logs => watched_logs - We're logging that a given log has been included in any fetch calls for checked headers, rather than that we have already checked for that log --- ...ql => 00030_create_watched_logs_table.sql} | 4 +- db/schema.sql | 92 +++++++++---------- libraries/shared/logs/extractor.go | 14 +-- libraries/shared/logs/extractor_test.go | 12 +-- .../repositories/checked_logs_repository.go | 10 +- .../checked_logs_repository_test.go | 28 +++--- pkg/datastore/repository.go | 4 +- pkg/fakes/checked_logs_repository.go | 30 +++--- test_config/test_config.go | 2 +- 9 files changed, 98 insertions(+), 98 deletions(-) rename db/migrations/{00030_create_checked_logs_table.sql => 00030_create_watched_logs_table.sql} (81%) diff --git a/db/migrations/00030_create_checked_logs_table.sql b/db/migrations/00030_create_watched_logs_table.sql similarity index 81% rename from db/migrations/00030_create_checked_logs_table.sql rename to db/migrations/00030_create_watched_logs_table.sql index 1a77560a..4268a68a 100644 --- 
a/db/migrations/00030_create_checked_logs_table.sql +++ b/db/migrations/00030_create_watched_logs_table.sql @@ -1,6 +1,6 @@ -- +goose Up -- SQL in this section is executed when the migration is applied. -CREATE TABLE public.checked_logs +CREATE TABLE public.watched_logs ( id SERIAL PRIMARY KEY, contract_address VARCHAR(42), @@ -9,4 +9,4 @@ CREATE TABLE public.checked_logs -- +goose Down -- SQL in this section is executed when the migration is rolled back. -DROP TABLE public.checked_logs; +DROP TABLE public.watched_logs; diff --git a/db/schema.sql b/db/schema.sql index 0a5bb3a1..eeb1c81e 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -155,37 +155,6 @@ CREATE SEQUENCE public.checked_headers_id_seq ALTER SEQUENCE public.checked_headers_id_seq OWNED BY public.checked_headers.id; --- --- Name: checked_logs; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.checked_logs ( - id integer NOT NULL, - contract_address character varying(42), - topic_zero character varying(66) -); - - --- --- Name: checked_logs_id_seq; Type: SEQUENCE; Schema: public; Owner: - --- - -CREATE SEQUENCE public.checked_logs_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: checked_logs_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - --- - -ALTER SEQUENCE public.checked_logs_id_seq OWNED BY public.checked_logs.id; - - -- -- Name: eth_nodes; Type: TABLE; Schema: public; Owner: - -- @@ -666,6 +635,37 @@ CREATE VIEW public.watched_event_logs AS WHERE ((((log_filters.topic0)::text = (full_sync_logs.topic0)::text) OR (log_filters.topic0 IS NULL)) AND (((log_filters.topic1)::text = (full_sync_logs.topic1)::text) OR (log_filters.topic1 IS NULL)) AND (((log_filters.topic2)::text = (full_sync_logs.topic2)::text) OR (log_filters.topic2 IS NULL)) AND (((log_filters.topic3)::text = (full_sync_logs.topic3)::text) OR (log_filters.topic3 IS NULL))); +-- +-- Name: watched_logs; Type: TABLE; Schema: public; Owner: - +-- + +CREATE 
TABLE public.watched_logs ( + id integer NOT NULL, + contract_address character varying(42), + topic_zero character varying(66) +); + + +-- +-- Name: watched_logs_id_seq; Type: SEQUENCE; Schema: public; Owner: - +-- + +CREATE SEQUENCE public.watched_logs_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +-- +-- Name: watched_logs_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - +-- + +ALTER SEQUENCE public.watched_logs_id_seq OWNED BY public.watched_logs.id; + + -- -- Name: addresses id; Type: DEFAULT; Schema: public; Owner: - -- @@ -687,13 +687,6 @@ ALTER TABLE ONLY public.blocks ALTER COLUMN id SET DEFAULT nextval('public.block ALTER TABLE ONLY public.checked_headers ALTER COLUMN id SET DEFAULT nextval('public.checked_headers_id_seq'::regclass); --- --- Name: checked_logs id; Type: DEFAULT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.checked_logs ALTER COLUMN id SET DEFAULT nextval('public.checked_logs_id_seq'::regclass); - - -- -- Name: eth_nodes id; Type: DEFAULT; Schema: public; Owner: - -- @@ -785,6 +778,13 @@ ALTER TABLE ONLY public.uncles ALTER COLUMN id SET DEFAULT nextval('public.uncle ALTER TABLE ONLY public.watched_contracts ALTER COLUMN contract_id SET DEFAULT nextval('public.watched_contracts_contract_id_seq'::regclass); +-- +-- Name: watched_logs id; Type: DEFAULT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.watched_logs ALTER COLUMN id SET DEFAULT nextval('public.watched_logs_id_seq'::regclass); + + -- -- Name: addresses addresses_address_key; Type: CONSTRAINT; Schema: public; Owner: - -- @@ -825,14 +825,6 @@ ALTER TABLE ONLY public.checked_headers ADD CONSTRAINT checked_headers_pkey PRIMARY KEY (id); --- --- Name: checked_logs checked_logs_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.checked_logs - ADD CONSTRAINT checked_logs_pkey PRIMARY KEY (id); - - -- -- Name: blocks eth_node_id_block_number_uc; Type: CONSTRAINT; Schema: public; 
Owner: - -- @@ -1001,6 +993,14 @@ ALTER TABLE ONLY public.watched_contracts ADD CONSTRAINT watched_contracts_pkey PRIMARY KEY (contract_id); +-- +-- Name: watched_logs watched_logs_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.watched_logs + ADD CONSTRAINT watched_logs_pkey PRIMARY KEY (id); + + -- -- Name: block_id_index; Type: INDEX; Schema: public; Owner: - -- diff --git a/libraries/shared/logs/extractor.go b/libraries/shared/logs/extractor.go index d2452870..36fe6eda 100644 --- a/libraries/shared/logs/extractor.go +++ b/libraries/shared/logs/extractor.go @@ -138,18 +138,18 @@ func getCheckCount(recheckHeaders constants.TransformerExecution) int64 { } func (extractor *LogExtractor) updateCheckedHeaders(config transformer.EventTransformerConfig) error { - hasBeenChecked, hasBeenCheckedErr := extractor.CheckedLogsRepository.HaveLogsBeenChecked(config.ContractAddresses, config.Topic) - if hasBeenCheckedErr != nil { - return hasBeenCheckedErr + alreadyWatchingLog, watchingLogErr := extractor.CheckedLogsRepository.AlreadyWatchingLog(config.ContractAddresses, config.Topic) + if watchingLogErr != nil { + return watchingLogErr } - if !hasBeenChecked { + if !alreadyWatchingLog { uncheckHeadersErr := extractor.CheckedHeadersRepository.MarkHeadersUnchecked(config.StartingBlockNumber) if uncheckHeadersErr != nil { return uncheckHeadersErr } - markLogsCheckedErr := extractor.CheckedLogsRepository.MarkLogsChecked(config.ContractAddresses, config.Topic) - if markLogsCheckedErr != nil { - return markLogsCheckedErr + markLogWatchedErr := extractor.CheckedLogsRepository.MarkLogWatched(config.ContractAddresses, config.Topic) + if markLogWatchedErr != nil { + return markLogWatchedErr } } return nil diff --git a/libraries/shared/logs/extractor_test.go b/libraries/shared/logs/extractor_test.go index 2a3ce5e9..aecd8e28 100644 --- a/libraries/shared/logs/extractor_test.go +++ b/libraries/shared/logs/extractor_test.go @@ -91,7 +91,7 @@ var _ = 
Describe("Log extractor", func() { }) It("returns error if checking whether log has been checked returns error", func() { - checkedLogsRepository.HasLogBeenCheckedError = fakes.FakeError + checkedLogsRepository.AlreadyWatchingLogError = fakes.FakeError err := extractor.AddTransformerConfig(getTransformerConfig(rand.Int63())) @@ -101,7 +101,7 @@ var _ = Describe("Log extractor", func() { Describe("when log has previously been checked", func() { It("does not mark any headers unchecked", func() { - checkedLogsRepository.HasLogBeenCheckedReturn = true + checkedLogsRepository.AlreadyWatchingLogReturn = true err := extractor.AddTransformerConfig(getTransformerConfig(rand.Int63())) @@ -112,7 +112,7 @@ var _ = Describe("Log extractor", func() { Describe("when log has not previously been checked", func() { BeforeEach(func() { - checkedLogsRepository.HasLogBeenCheckedReturn = false + checkedLogsRepository.AlreadyWatchingLogReturn = false }) It("marks headers since transformer's starting block number as unchecked", func() { @@ -140,12 +140,12 @@ var _ = Describe("Log extractor", func() { err := extractor.AddTransformerConfig(config) Expect(err).NotTo(HaveOccurred()) - Expect(checkedLogsRepository.MarkLogCheckedAddresses).To(Equal(config.ContractAddresses)) - Expect(checkedLogsRepository.MarkLogCheckedTopicZero).To(Equal(config.Topic)) + Expect(checkedLogsRepository.MarkLogWatchedAddresses).To(Equal(config.ContractAddresses)) + Expect(checkedLogsRepository.MarkLogWatchedTopicZero).To(Equal(config.Topic)) }) It("returns error if marking logs checked returns error", func() { - checkedLogsRepository.MarkLogCheckedError = fakes.FakeError + checkedLogsRepository.MarkLogWatchedError = fakes.FakeError err := extractor.AddTransformerConfig(getTransformerConfig(rand.Int63())) diff --git a/pkg/datastore/postgres/repositories/checked_logs_repository.go b/pkg/datastore/postgres/repositories/checked_logs_repository.go index 113be3ed..b3f012cb 100644 --- 
a/pkg/datastore/postgres/repositories/checked_logs_repository.go +++ b/pkg/datastore/postgres/repositories/checked_logs_repository.go @@ -30,10 +30,10 @@ func NewCheckedLogsRepository(db *postgres.DB) CheckedLogsRepository { } // Return whether a given address + topic0 has been fetched on a previous run of vDB -func (repository CheckedLogsRepository) HaveLogsBeenChecked(addresses []string, topic0 string) (bool, error) { +func (repository CheckedLogsRepository) AlreadyWatchingLog(addresses []string, topic0 string) (bool, error) { for _, address := range addresses { var addressExists bool - getAddressExistsErr := repository.db.Get(&addressExists, `SELECT EXISTS(SELECT 1 FROM public.checked_logs WHERE contract_address = $1)`, address) + getAddressExistsErr := repository.db.Get(&addressExists, `SELECT EXISTS(SELECT 1 FROM public.watched_logs WHERE contract_address = $1)`, address) if getAddressExistsErr != nil { return false, getAddressExistsErr } @@ -42,7 +42,7 @@ func (repository CheckedLogsRepository) HaveLogsBeenChecked(addresses []string, } } var topicZeroExists bool - getTopicZeroExistsErr := repository.db.Get(&topicZeroExists, `SELECT EXISTS(SELECT 1 FROM public.checked_logs WHERE topic_zero = $1)`, topic0) + getTopicZeroExistsErr := repository.db.Get(&topicZeroExists, `SELECT EXISTS(SELECT 1 FROM public.watched_logs WHERE topic_zero = $1)`, topic0) if getTopicZeroExistsErr != nil { return false, getTopicZeroExistsErr } @@ -50,13 +50,13 @@ func (repository CheckedLogsRepository) HaveLogsBeenChecked(addresses []string, } // Persist that a given address + topic0 has is being fetched on this run of vDB -func (repository CheckedLogsRepository) MarkLogsChecked(addresses []string, topic0 string) error { +func (repository CheckedLogsRepository) MarkLogWatched(addresses []string, topic0 string) error { tx, txErr := repository.db.Beginx() if txErr != nil { return txErr } for _, address := range addresses { - _, insertErr := tx.Exec(`INSERT INTO public.checked_logs 
(contract_address, topic_zero) VALUES ($1, $2)`, address, topic0) + _, insertErr := tx.Exec(`INSERT INTO public.watched_logs (contract_address, topic_zero) VALUES ($1, $2)`, address, topic0) if insertErr != nil { rollbackErr := tx.Rollback() if rollbackErr != nil { diff --git a/pkg/datastore/postgres/repositories/checked_logs_repository_test.go b/pkg/datastore/postgres/repositories/checked_logs_repository_test.go index 8597d5fe..6bb9a8db 100644 --- a/pkg/datastore/postgres/repositories/checked_logs_repository_test.go +++ b/pkg/datastore/postgres/repositories/checked_logs_repository_test.go @@ -47,12 +47,12 @@ var _ = Describe("Checked logs repository", func() { Expect(closeErr).NotTo(HaveOccurred()) }) - Describe("HaveLogsBeenChecked", func() { + Describe("AlreadyWatchingLog", func() { It("returns true if all addresses and the topic0 are already present in the db", func() { - _, insertErr := db.Exec(`INSERT INTO public.checked_logs (contract_address, topic_zero) VALUES ($1, $2)`, fakeAddress, fakeTopicZero) + _, insertErr := db.Exec(`INSERT INTO public.watched_logs (contract_address, topic_zero) VALUES ($1, $2)`, fakeAddress, fakeTopicZero) Expect(insertErr).NotTo(HaveOccurred()) - hasBeenChecked, err := repository.HaveLogsBeenChecked(fakeAddresses, fakeTopicZero) + hasBeenChecked, err := repository.AlreadyWatchingLog(fakeAddresses, fakeTopicZero) Expect(err).NotTo(HaveOccurred()) Expect(hasBeenChecked).To(BeTrue()) @@ -62,13 +62,13 @@ var _ = Describe("Checked logs repository", func() { anotherFakeAddress := common.HexToAddress("0x" + fakes.RandomString(40)).Hex() anotherFakeTopicZero := common.HexToHash("0x" + fakes.RandomString(64)).Hex() // insert row with matching address but different topic0 - _, insertOneErr := db.Exec(`INSERT INTO public.checked_logs (contract_address, topic_zero) VALUES ($1, $2)`, fakeAddress, anotherFakeTopicZero) + _, insertOneErr := db.Exec(`INSERT INTO public.watched_logs (contract_address, topic_zero) VALUES ($1, $2)`, fakeAddress, 
anotherFakeTopicZero) Expect(insertOneErr).NotTo(HaveOccurred()) // insert row with matching topic0 but different address - _, insertTwoErr := db.Exec(`INSERT INTO public.checked_logs (contract_address, topic_zero) VALUES ($1, $2)`, anotherFakeAddress, fakeTopicZero) + _, insertTwoErr := db.Exec(`INSERT INTO public.watched_logs (contract_address, topic_zero) VALUES ($1, $2)`, anotherFakeAddress, fakeTopicZero) Expect(insertTwoErr).NotTo(HaveOccurred()) - hasBeenChecked, err := repository.HaveLogsBeenChecked(fakeAddresses, fakeTopicZero) + hasBeenChecked, err := repository.AlreadyWatchingLog(fakeAddresses, fakeTopicZero) Expect(err).NotTo(HaveOccurred()) Expect(hasBeenChecked).To(BeTrue()) @@ -76,10 +76,10 @@ var _ = Describe("Checked logs repository", func() { It("returns false if any address has not been checked", func() { anotherFakeAddress := common.HexToAddress("0x" + fakes.RandomString(40)).Hex() - _, insertErr := db.Exec(`INSERT INTO public.checked_logs (contract_address, topic_zero) VALUES ($1, $2)`, fakeAddress, fakeTopicZero) + _, insertErr := db.Exec(`INSERT INTO public.watched_logs (contract_address, topic_zero) VALUES ($1, $2)`, fakeAddress, fakeTopicZero) Expect(insertErr).NotTo(HaveOccurred()) - hasBeenChecked, err := repository.HaveLogsBeenChecked(append(fakeAddresses, anotherFakeAddress), fakeTopicZero) + hasBeenChecked, err := repository.AlreadyWatchingLog(append(fakeAddresses, anotherFakeAddress), fakeTopicZero) Expect(err).NotTo(HaveOccurred()) Expect(hasBeenChecked).To(BeFalse()) @@ -87,27 +87,27 @@ var _ = Describe("Checked logs repository", func() { It("returns false if topic0 has not been checked", func() { anotherFakeTopicZero := common.HexToHash("0x" + fakes.RandomString(64)).Hex() - _, insertErr := db.Exec(`INSERT INTO public.checked_logs (contract_address, topic_zero) VALUES ($1, $2)`, fakeAddress, anotherFakeTopicZero) + _, insertErr := db.Exec(`INSERT INTO public.watched_logs (contract_address, topic_zero) VALUES ($1, $2)`, fakeAddress, 
anotherFakeTopicZero) Expect(insertErr).NotTo(HaveOccurred()) - hasBeenChecked, err := repository.HaveLogsBeenChecked(fakeAddresses, fakeTopicZero) + hasBeenChecked, err := repository.AlreadyWatchingLog(fakeAddresses, fakeTopicZero) Expect(err).NotTo(HaveOccurred()) Expect(hasBeenChecked).To(BeFalse()) }) }) - Describe("MarkLogsChecked", func() { + Describe("MarkLogWatched", func() { It("adds a row for all of transformer's addresses + topic0", func() { anotherFakeAddress := common.HexToAddress("0x" + fakes.RandomString(40)).Hex() - err := repository.MarkLogsChecked(append(fakeAddresses, anotherFakeAddress), fakeTopicZero) + err := repository.MarkLogWatched(append(fakeAddresses, anotherFakeAddress), fakeTopicZero) Expect(err).NotTo(HaveOccurred()) var comboOneExists, comboTwoExists bool - getComboOneErr := db.Get(&comboOneExists, `SELECT EXISTS(SELECT 1 FROM public.checked_logs WHERE contract_address = $1 AND topic_zero = $2)`, fakeAddress, fakeTopicZero) + getComboOneErr := db.Get(&comboOneExists, `SELECT EXISTS(SELECT 1 FROM public.watched_logs WHERE contract_address = $1 AND topic_zero = $2)`, fakeAddress, fakeTopicZero) Expect(getComboOneErr).NotTo(HaveOccurred()) Expect(comboOneExists).To(BeTrue()) - getComboTwoErr := db.Get(&comboTwoExists, `SELECT EXISTS(SELECT 1 FROM public.checked_logs WHERE contract_address = $1 AND topic_zero = $2)`, anotherFakeAddress, fakeTopicZero) + getComboTwoErr := db.Get(&comboTwoExists, `SELECT EXISTS(SELECT 1 FROM public.watched_logs WHERE contract_address = $1 AND topic_zero = $2)`, anotherFakeAddress, fakeTopicZero) Expect(getComboTwoErr).NotTo(HaveOccurred()) Expect(comboTwoExists).To(BeTrue()) }) diff --git a/pkg/datastore/repository.go b/pkg/datastore/repository.go index ff0f02dd..4ea1e293 100644 --- a/pkg/datastore/repository.go +++ b/pkg/datastore/repository.go @@ -41,8 +41,8 @@ type CheckedHeadersRepository interface { } type CheckedLogsRepository interface { - HaveLogsBeenChecked(addresses []string, topic0 string) (bool, 
error) - MarkLogsChecked(addresses []string, topic0 string) error + AlreadyWatchingLog(addresses []string, topic0 string) (bool, error) + MarkLogWatched(addresses []string, topic0 string) error } type ContractRepository interface { diff --git a/pkg/fakes/checked_logs_repository.go b/pkg/fakes/checked_logs_repository.go index fca57e96..8506746a 100644 --- a/pkg/fakes/checked_logs_repository.go +++ b/pkg/fakes/checked_logs_repository.go @@ -17,23 +17,23 @@ package fakes type MockCheckedLogsRepository struct { - HasLogBeenCheckedAddresses []string - HasLogBeenCheckedError error - HasLogBeenCheckedReturn bool - HasLogBeenCheckedTopicZero string - MarkLogCheckedAddresses []string - MarkLogCheckedError error - MarkLogCheckedTopicZero string + AlreadyWatchingLogAddresses []string + AlreadyWatchingLogError error + AlreadyWatchingLogReturn bool + AlreadyWatchingLogTopicZero string + MarkLogWatchedAddresses []string + MarkLogWatchedError error + MarkLogWatchedTopicZero string } -func (repository *MockCheckedLogsRepository) HaveLogsBeenChecked(addresses []string, topic0 string) (bool, error) { - repository.HasLogBeenCheckedAddresses = addresses - repository.HasLogBeenCheckedTopicZero = topic0 - return repository.HasLogBeenCheckedReturn, repository.HasLogBeenCheckedError +func (repository *MockCheckedLogsRepository) AlreadyWatchingLog(addresses []string, topic0 string) (bool, error) { + repository.AlreadyWatchingLogAddresses = addresses + repository.AlreadyWatchingLogTopicZero = topic0 + return repository.AlreadyWatchingLogReturn, repository.AlreadyWatchingLogError } -func (repository *MockCheckedLogsRepository) MarkLogsChecked(addresses []string, topic0 string) error { - repository.MarkLogCheckedAddresses = addresses - repository.MarkLogCheckedTopicZero = topic0 - return repository.MarkLogCheckedError +func (repository *MockCheckedLogsRepository) MarkLogWatched(addresses []string, topic0 string) error { + repository.MarkLogWatchedAddresses = addresses + 
repository.MarkLogWatchedTopicZero = topic0 + return repository.MarkLogWatchedError } diff --git a/test_config/test_config.go b/test_config/test_config.go index f3dad64e..eb830d34 100644 --- a/test_config/test_config.go +++ b/test_config/test_config.go @@ -106,7 +106,6 @@ func CleanTestDB(db *postgres.DB) { db.MustExec("DELETE FROM addresses") db.MustExec("DELETE FROM blocks") db.MustExec("DELETE FROM checked_headers") - db.MustExec("DELETE FROM checked_logs") // can't delete from eth_nodes since this function is called after the required eth_node is persisted db.MustExec("DELETE FROM full_sync_logs") db.MustExec("DELETE FROM full_sync_receipts") @@ -119,6 +118,7 @@ func CleanTestDB(db *postgres.DB) { db.MustExec("DELETE FROM log_filters") db.MustExec("DELETE FROM queued_storage") db.MustExec("DELETE FROM watched_contracts") + db.MustExec("DELETE FROM watched_logs") } func CleanCheckedHeadersTable(db *postgres.DB, columnNames []string) { From 2b798e00e09df79a7c8ee03ba30e67addd7a1e56 Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Fri, 13 Sep 2019 11:20:13 -0500 Subject: [PATCH 17/21] Cap random fake timestamp --- pkg/fakes/data.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/fakes/data.go b/pkg/fakes/data.go index 2865a29b..52266f1a 100644 --- a/pkg/fakes/data.go +++ b/pkg/fakes/data.go @@ -32,7 +32,7 @@ var ( FakeAddress = common.HexToAddress("0x" + RandomString(40)) FakeError = errors.New("failed") FakeHash = common.BytesToHash([]byte{1, 2, 3, 4, 5}) - fakeTimestamp = rand.Int63() + fakeTimestamp = rand.Int63n(1500000000) ) var rawFakeHeader, _ = json.Marshal(types.Header{}) From 4fa19be90a1bac7f84665fe260ef275e899d89b4 Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Wed, 18 Sep 2019 20:55:15 -0500 Subject: [PATCH 18/21] Return error when no logs/headers available - Replaces bool and moots question of error/bool ordering - Also make event watcher execution synchronous --- cmd/execute.go | 10 +- libraries/shared/logs/delegator.go | 
22 ++--- libraries/shared/logs/delegator_test.go | 30 +++--- libraries/shared/logs/extractor.go | 28 +++--- libraries/shared/logs/extractor_test.go | 42 ++++---- libraries/shared/mocks/log_delegator.go | 19 ++-- libraries/shared/mocks/log_extractor.go | 19 ++-- libraries/shared/watcher/event_watcher.go | 32 +++--- .../shared/watcher/event_watcher_test.go | 99 +++++++------------ 9 files changed, 137 insertions(+), 164 deletions(-) diff --git a/cmd/execute.go b/cmd/execute.go index 72adf856..b416cbb5 100644 --- a/cmd/execute.go +++ b/cmd/execute.go @@ -160,13 +160,9 @@ func watchEthEvents(w *watcher.EventWatcher, wg *syn.WaitGroup) { } else { recheck = constants.HeaderUnchecked } - errs := make(chan error) - go w.Execute(recheck, errs) - for { - select { - case err := <-errs: - LogWithCommand.Fatalf("error executing event watcher: %s", err.Error()) - } + err := w.Execute(recheck) + if err != nil { + LogWithCommand.Fatalf("error executing event watcher: %s", err.Error()) } } diff --git a/libraries/shared/logs/delegator.go b/libraries/shared/logs/delegator.go index 7d2b2a5c..79b398ad 100644 --- a/libraries/shared/logs/delegator.go +++ b/libraries/shared/logs/delegator.go @@ -25,16 +25,14 @@ import ( "github.com/vulcanize/vulcanizedb/pkg/datastore" ) -var ErrNoTransformers = errors.New("no event transformers configured in the log delegator") - -const ( - logsFound = true - noLogsFound = false +var ( + ErrNoLogs = errors.New("no logs available for transforming") + ErrNoTransformers = errors.New("no event transformers configured in the log delegator") ) type ILogDelegator interface { AddTransformer(t transformer.EventTransformer) - DelegateLogs() (error, bool) + DelegateLogs() error } type LogDelegator struct { @@ -48,28 +46,28 @@ func (delegator *LogDelegator) AddTransformer(t transformer.EventTransformer) { delegator.Chunker.AddConfig(t.GetConfig()) } -func (delegator *LogDelegator) DelegateLogs() (error, bool) { +func (delegator *LogDelegator) DelegateLogs() error { 
if len(delegator.Transformers) < 1 { - return ErrNoTransformers, noLogsFound + return ErrNoTransformers } persistedLogs, fetchErr := delegator.LogRepository.GetUntransformedHeaderSyncLogs() if fetchErr != nil { logrus.Errorf("error loading logs from db: %s", fetchErr.Error()) - return fetchErr, noLogsFound + return fetchErr } if len(persistedLogs) < 1 { - return nil, noLogsFound + return ErrNoLogs } transformErr := delegator.delegateLogs(persistedLogs) if transformErr != nil { logrus.Errorf("error transforming logs: %s", transformErr) - return transformErr, logsFound + return transformErr } - return nil, logsFound + return nil } func (delegator *LogDelegator) delegateLogs(logs []core.HeaderSyncLog) error { diff --git a/libraries/shared/logs/delegator_test.go b/libraries/shared/logs/delegator_test.go index f2bd581c..43813be8 100644 --- a/libraries/shared/logs/delegator_test.go +++ b/libraries/shared/logs/delegator_test.go @@ -59,10 +59,10 @@ var _ = Describe("Log delegator", func() { }) Describe("DelegateLogs", func() { - It("returns an error if no transformers configured", func() { + It("returns error if no transformers configured", func() { delegator := newDelegator(&fakes.MockHeaderSyncLogRepository{}) - err, _ := delegator.DelegateLogs() + err := delegator.DelegateLogs() Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(logs.ErrNoTransformers)) @@ -70,35 +70,36 @@ var _ = Describe("Log delegator", func() { It("gets untransformed logs", func() { mockLogRepository := &fakes.MockHeaderSyncLogRepository{} + mockLogRepository.ReturnLogs = []core.HeaderSyncLog{{}} delegator := newDelegator(mockLogRepository) delegator.AddTransformer(&mocks.MockEventTransformer{}) - err, _ := delegator.DelegateLogs() + err := delegator.DelegateLogs() Expect(err).NotTo(HaveOccurred()) Expect(mockLogRepository.GetCalled).To(BeTrue()) }) - It("emits error if getting untransformed logs fails", func() { + It("returns error if getting untransformed logs fails", func() { 
mockLogRepository := &fakes.MockHeaderSyncLogRepository{} mockLogRepository.GetError = fakes.FakeError delegator := newDelegator(mockLogRepository) delegator.AddTransformer(&mocks.MockEventTransformer{}) - err, _ := delegator.DelegateLogs() + err := delegator.DelegateLogs() Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) }) - It("emits that no logs were found if no logs returned", func() { + It("returns error that no logs were found if no logs returned", func() { delegator := newDelegator(&fakes.MockHeaderSyncLogRepository{}) delegator.AddTransformer(&mocks.MockEventTransformer{}) - err, logsFound := delegator.DelegateLogs() + err := delegator.DelegateLogs() - Expect(err).NotTo(HaveOccurred()) - Expect(logsFound).To(BeFalse()) + Expect(err).To(HaveOccurred()) + Expect(err).To(MatchError(logs.ErrNoLogs)) }) It("delegates chunked logs to transformers", func() { @@ -115,27 +116,27 @@ var _ = Describe("Log delegator", func() { delegator := newDelegator(mockLogRepository) delegator.AddTransformer(fakeTransformer) - err, _ := delegator.DelegateLogs() + err := delegator.DelegateLogs() Expect(err).NotTo(HaveOccurred()) Expect(fakeTransformer.ExecuteWasCalled).To(BeTrue()) Expect(fakeTransformer.PassedLogs).To(Equal(fakeHeaderSyncLogs)) }) - It("emits error if transformer returns an error", func() { + It("returns error if transformer returns an error", func() { mockLogRepository := &fakes.MockHeaderSyncLogRepository{} mockLogRepository.ReturnLogs = []core.HeaderSyncLog{{}} delegator := newDelegator(mockLogRepository) fakeTransformer := &mocks.MockEventTransformer{ExecuteError: fakes.FakeError} delegator.AddTransformer(fakeTransformer) - err, _ := delegator.DelegateLogs() + err := delegator.DelegateLogs() Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) }) - It("emits logs found when logs returned and delegated", func() { + It("returns nil for error when logs returned and delegated", func() { fakeTransformer := 
&mocks.MockEventTransformer{} config := mocks.FakeTransformerConfig fakeTransformer.SetTransformerConfig(config) @@ -149,10 +150,9 @@ var _ = Describe("Log delegator", func() { delegator := newDelegator(mockLogRepository) delegator.AddTransformer(fakeTransformer) - err, logsFound := delegator.DelegateLogs() + err := delegator.DelegateLogs() Expect(err).NotTo(HaveOccurred()) - Expect(logsFound).To(BeTrue()) }) }) }) diff --git a/libraries/shared/logs/extractor.go b/libraries/shared/logs/extractor.go index 36fe6eda..bbd769c2 100644 --- a/libraries/shared/logs/extractor.go +++ b/libraries/shared/logs/extractor.go @@ -28,16 +28,14 @@ import ( "github.com/vulcanize/vulcanizedb/pkg/datastore" ) -var ErrNoWatchedAddresses = errors.New("no watched addresses configured in the log extractor") - -const ( - uncheckedHeadersFound = true - noUncheckedHeadersFound = false +var ( + ErrNoUncheckedHeaders = errors.New("no unchecked headers available for log fetching") + ErrNoWatchedAddresses = errors.New("no watched addresses configured in the log extractor") ) type ILogExtractor interface { AddTransformerConfig(config transformer.EventTransformerConfig) error - ExtractLogs(recheckHeaders constants.TransformerExecution) (error, bool) + ExtractLogs(recheckHeaders constants.TransformerExecution) error } type LogExtractor struct { @@ -71,50 +69,50 @@ func (extractor *LogExtractor) AddTransformerConfig(config transformer.EventTran } // Fetch and persist watched logs -func (extractor LogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution) (error, bool) { +func (extractor LogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution) error { if len(extractor.Addresses) < 1 { logrus.Errorf("error extracting logs: %s", ErrNoWatchedAddresses.Error()) - return ErrNoWatchedAddresses, noUncheckedHeadersFound + return ErrNoWatchedAddresses } uncheckedHeaders, uncheckedHeadersErr := extractor.CheckedHeadersRepository.UncheckedHeaders(*extractor.StartingBlock, -1, 
getCheckCount(recheckHeaders)) if uncheckedHeadersErr != nil { logrus.Errorf("error fetching missing headers: %s", uncheckedHeadersErr) - return uncheckedHeadersErr, noUncheckedHeadersFound + return uncheckedHeadersErr } if len(uncheckedHeaders) < 1 { - return nil, noUncheckedHeadersFound + return ErrNoUncheckedHeaders } for _, header := range uncheckedHeaders { logs, fetchLogsErr := extractor.Fetcher.FetchLogs(extractor.Addresses, extractor.Topics, header) if fetchLogsErr != nil { logError("error fetching logs for header: %s", fetchLogsErr, header) - return fetchLogsErr, uncheckedHeadersFound + return fetchLogsErr } if len(logs) > 0 { transactionsSyncErr := extractor.Syncer.SyncTransactions(header.Id, logs) if transactionsSyncErr != nil { logError("error syncing transactions: %s", transactionsSyncErr, header) - return transactionsSyncErr, uncheckedHeadersFound + return transactionsSyncErr } createLogsErr := extractor.LogRepository.CreateHeaderSyncLogs(header.Id, logs) if createLogsErr != nil { logError("error persisting logs: %s", createLogsErr, header) - return createLogsErr, uncheckedHeadersFound + return createLogsErr } } markHeaderCheckedErr := extractor.CheckedHeadersRepository.MarkHeaderChecked(header.Id) if markHeaderCheckedErr != nil { logError("error marking header checked: %s", markHeaderCheckedErr, header) - return markHeaderCheckedErr, uncheckedHeadersFound + return markHeaderCheckedErr } } - return nil, uncheckedHeadersFound + return nil } func earlierStartingBlockNumber(transformerBlock, watcherBlock int64) bool { diff --git a/libraries/shared/logs/extractor_test.go b/libraries/shared/logs/extractor_test.go index aecd8e28..c9f5b317 100644 --- a/libraries/shared/logs/extractor_test.go +++ b/libraries/shared/logs/extractor_test.go @@ -157,7 +157,7 @@ var _ = Describe("Log extractor", func() { Describe("ExtractLogs", func() { It("returns error if no watched addresses configured", func() { - err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) + 
err := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(logs.ErrNoWatchedAddresses)) @@ -171,7 +171,7 @@ var _ = Describe("Log extractor", func() { startingBlockNumber := rand.Int63() extractor.AddTransformerConfig(getTransformerConfig(startingBlockNumber)) - err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) + err := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).NotTo(HaveOccurred()) Expect(mockCheckedHeadersRepository.UncheckedHeadersStartingBlockNumber).To(Equal(startingBlockNumber)) @@ -188,7 +188,7 @@ var _ = Describe("Log extractor", func() { startingBlockNumber := rand.Int63() extractor.AddTransformerConfig(getTransformerConfig(startingBlockNumber)) - err, _ := extractor.ExtractLogs(constants.HeaderRecheck) + err := extractor.ExtractLogs(constants.HeaderRecheck) Expect(err).NotTo(HaveOccurred()) Expect(mockCheckedHeadersRepository.UncheckedHeadersStartingBlockNumber).To(Equal(startingBlockNumber)) @@ -197,13 +197,13 @@ var _ = Describe("Log extractor", func() { }) }) - It("emits error if getting unchecked headers fails", func() { + It("returns error if getting unchecked headers fails", func() { addTransformerConfig(extractor) mockCheckedHeadersRepository := &fakes.MockCheckedHeadersRepository{} mockCheckedHeadersRepository.UncheckedHeadersReturnError = fakes.FakeError extractor.CheckedHeadersRepository = mockCheckedHeadersRepository - err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) + err := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) @@ -215,20 +215,19 @@ var _ = Describe("Log extractor", func() { mockLogFetcher := &mocks.MockLogFetcher{} extractor.Fetcher = mockLogFetcher - err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) + _ = extractor.ExtractLogs(constants.HeaderUnchecked) - Expect(err).NotTo(HaveOccurred()) Expect(mockLogFetcher.FetchCalled).To(BeFalse()) }) - It("emits that 
no unchecked headers were found", func() { + It("returns error that no unchecked headers were found", func() { addTransformerConfig(extractor) mockLogFetcher := &mocks.MockLogFetcher{} extractor.Fetcher = mockLogFetcher - _, uncheckedHeadersFound := extractor.ExtractLogs(constants.HeaderUnchecked) + err := extractor.ExtractLogs(constants.HeaderUnchecked) - Expect(uncheckedHeadersFound).To(BeFalse()) + Expect(err).To(MatchError(logs.ErrNoUncheckedHeaders)) }) }) @@ -245,7 +244,7 @@ var _ = Describe("Log extractor", func() { mockLogFetcher := &mocks.MockLogFetcher{} extractor.Fetcher = mockLogFetcher - err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) + err := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).NotTo(HaveOccurred()) Expect(mockLogFetcher.FetchCalled).To(BeTrue()) @@ -262,7 +261,7 @@ var _ = Describe("Log extractor", func() { mockLogFetcher.ReturnError = fakes.FakeError extractor.Fetcher = mockLogFetcher - err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) + err := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) @@ -275,7 +274,7 @@ var _ = Describe("Log extractor", func() { mockTransactionSyncer := &fakes.MockTransactionSyncer{} extractor.Syncer = mockTransactionSyncer - err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) + err := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).NotTo(HaveOccurred()) Expect(mockTransactionSyncer.SyncTransactionsCalled).To(BeFalse()) @@ -290,7 +289,7 @@ var _ = Describe("Log extractor", func() { mockTransactionSyncer := &fakes.MockTransactionSyncer{} extractor.Syncer = mockTransactionSyncer - err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) + err := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).NotTo(HaveOccurred()) Expect(mockTransactionSyncer.SyncTransactionsCalled).To(BeTrue()) @@ -304,7 +303,7 @@ var _ = Describe("Log extractor", func() { 
mockTransactionSyncer.SyncTransactionsError = fakes.FakeError extractor.Syncer = mockTransactionSyncer - err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) + err := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) @@ -324,7 +323,7 @@ var _ = Describe("Log extractor", func() { mockLogRepository := &fakes.MockHeaderSyncLogRepository{} extractor.LogRepository = mockLogRepository - err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) + err := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).NotTo(HaveOccurred()) Expect(mockLogRepository.PassedLogs).To(Equal(fakeLogs)) @@ -338,7 +337,7 @@ var _ = Describe("Log extractor", func() { mockLogRepository.CreateError = fakes.FakeError extractor.LogRepository = mockLogRepository - err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) + err := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) @@ -353,7 +352,7 @@ var _ = Describe("Log extractor", func() { mockCheckedHeadersRepository.UncheckedHeadersReturnHeaders = []core.Header{{Id: headerID}} extractor.CheckedHeadersRepository = mockCheckedHeadersRepository - err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) + err := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).NotTo(HaveOccurred()) Expect(mockCheckedHeadersRepository.MarkHeaderCheckedHeaderID).To(Equal(headerID)) @@ -367,20 +366,19 @@ var _ = Describe("Log extractor", func() { mockCheckedHeadersRepository.MarkHeaderCheckedReturnError = fakes.FakeError extractor.CheckedHeadersRepository = mockCheckedHeadersRepository - err, _ := extractor.ExtractLogs(constants.HeaderUnchecked) + err := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).To(HaveOccurred()) Expect(err).To(MatchError(fakes.FakeError)) }) - It("emits that missing headers were found", func() { + It("returns nil for error if everything succeeds", func() { 
addUncheckedHeader(extractor) addTransformerConfig(extractor) - err, missingHeadersFound := extractor.ExtractLogs(constants.HeaderUnchecked) + err := extractor.ExtractLogs(constants.HeaderUnchecked) Expect(err).NotTo(HaveOccurred()) - Expect(missingHeadersFound).To(BeTrue()) }) }) }) diff --git a/libraries/shared/mocks/log_delegator.go b/libraries/shared/mocks/log_delegator.go index ad6b926e..b627877d 100644 --- a/libraries/shared/mocks/log_delegator.go +++ b/libraries/shared/mocks/log_delegator.go @@ -24,21 +24,22 @@ type MockLogDelegator struct { AddedTransformers []transformer.EventTransformer DelegateCallCount int DelegateErrors []error - LogsFound []bool } func (delegator *MockLogDelegator) AddTransformer(t transformer.EventTransformer) { delegator.AddedTransformers = append(delegator.AddedTransformers, t) } -func (delegator *MockLogDelegator) DelegateLogs() (error, bool) { +func (delegator *MockLogDelegator) DelegateLogs() error { delegator.DelegateCallCount++ - var delegateErrorThisRun error - delegateErrorThisRun, delegator.DelegateErrors = delegator.DelegateErrors[0], delegator.DelegateErrors[1:] - if delegateErrorThisRun != nil { - return delegateErrorThisRun, false + if len(delegator.DelegateErrors) > 1 { + var delegateErrorThisRun error + delegateErrorThisRun, delegator.DelegateErrors = delegator.DelegateErrors[0], delegator.DelegateErrors[1:] + return delegateErrorThisRun + } else if len(delegator.DelegateErrors) == 1 { + thisErr := delegator.DelegateErrors[0] + delegator.DelegateErrors = []error{} + return thisErr } - var logsFoundThisRun bool - logsFoundThisRun, delegator.LogsFound = delegator.LogsFound[0], delegator.LogsFound[1:] - return nil, logsFoundThisRun + return nil } diff --git a/libraries/shared/mocks/log_extractor.go b/libraries/shared/mocks/log_extractor.go index 363d27b5..8a7be9bc 100644 --- a/libraries/shared/mocks/log_extractor.go +++ b/libraries/shared/mocks/log_extractor.go @@ -26,7 +26,6 @@ type MockLogExtractor struct { 
AddTransformerConfigError error ExtractLogsCount int ExtractLogsErrors []error - UncheckedHeadersExist []bool } func (extractor *MockLogExtractor) AddTransformerConfig(config transformer.EventTransformerConfig) error { @@ -34,14 +33,16 @@ func (extractor *MockLogExtractor) AddTransformerConfig(config transformer.Event return extractor.AddTransformerConfigError } -func (extractor *MockLogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution) (error, bool) { +func (extractor *MockLogExtractor) ExtractLogs(recheckHeaders constants.TransformerExecution) error { extractor.ExtractLogsCount++ - var errorThisRun error - errorThisRun, extractor.ExtractLogsErrors = extractor.ExtractLogsErrors[0], extractor.ExtractLogsErrors[1:] - if errorThisRun != nil { - return errorThisRun, false + if len(extractor.ExtractLogsErrors) > 1 { + var errorThisRun error + errorThisRun, extractor.ExtractLogsErrors = extractor.ExtractLogsErrors[0], extractor.ExtractLogsErrors[1:] + return errorThisRun + } else if len(extractor.ExtractLogsErrors) == 1 { + thisErr := extractor.ExtractLogsErrors[0] + extractor.ExtractLogsErrors = []error{} + return thisErr } - var missingHeadersExist bool - missingHeadersExist, extractor.UncheckedHeadersExist = extractor.UncheckedHeadersExist[0], extractor.UncheckedHeadersExist[1:] - return nil, missingHeadersExist + return nil } diff --git a/libraries/shared/watcher/event_watcher.go b/libraries/shared/watcher/event_watcher.go index 80b0a023..d6028c5c 100644 --- a/libraries/shared/watcher/event_watcher.go +++ b/libraries/shared/watcher/event_watcher.go @@ -74,49 +74,53 @@ func (watcher *EventWatcher) AddTransformers(initializers []transformer.EventTra } // Extracts and delegates watched log events. 
-func (watcher *EventWatcher) Execute(recheckHeaders constants.TransformerExecution, errsChan chan error) { - extractErrsChan := make(chan error) +func (watcher *EventWatcher) Execute(recheckHeaders constants.TransformerExecution) error { delegateErrsChan := make(chan error) + extractErrsChan := make(chan error) + defer close(delegateErrsChan) + defer close(extractErrsChan) go watcher.extractLogs(recheckHeaders, extractErrsChan) go watcher.delegateLogs(delegateErrsChan) for { select { - case extractErr := <-extractErrsChan: - logrus.Errorf("error extracting logs in event watcher: %s", extractErr.Error()) - errsChan <- extractErr case delegateErr := <-delegateErrsChan: logrus.Errorf("error delegating logs in event watcher: %s", delegateErr.Error()) - errsChan <- delegateErr + return delegateErr + case extractErr := <-extractErrsChan: + logrus.Errorf("error extracting logs in event watcher: %s", extractErr.Error()) + return extractErr } } } func (watcher *EventWatcher) extractLogs(recheckHeaders constants.TransformerExecution, errs chan error) { - err, uncheckedHeadersFound := watcher.LogExtractor.ExtractLogs(recheckHeaders) - if err != nil { + err := watcher.LogExtractor.ExtractLogs(recheckHeaders) + if err != nil && err != logs.ErrNoUncheckedHeaders { errs <- err + return } - if uncheckedHeadersFound { + if err == logs.ErrNoUncheckedHeaders { + time.Sleep(NoNewDataPause) watcher.extractLogs(recheckHeaders, errs) } else { - time.Sleep(NoNewDataPause) watcher.extractLogs(recheckHeaders, errs) } } func (watcher *EventWatcher) delegateLogs(errs chan error) { - err, logsFound := watcher.LogDelegator.DelegateLogs() - if err != nil { + err := watcher.LogDelegator.DelegateLogs() + if err != nil && err != logs.ErrNoLogs { errs <- err + return } - if logsFound { + if err == logs.ErrNoLogs { + time.Sleep(NoNewDataPause) watcher.delegateLogs(errs) } else { - time.Sleep(NoNewDataPause) watcher.delegateLogs(errs) } } diff --git a/libraries/shared/watcher/event_watcher_test.go 
b/libraries/shared/watcher/event_watcher_test.go index 08206d39..69f9b5e7 100644 --- a/libraries/shared/watcher/event_watcher_test.go +++ b/libraries/shared/watcher/event_watcher_test.go @@ -17,6 +17,7 @@ package watcher_test import ( + "errors" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" "github.com/vulcanize/vulcanizedb/libraries/shared/constants" @@ -26,6 +27,8 @@ import ( "github.com/vulcanize/vulcanizedb/pkg/fakes" ) +var errExecuteClosed = errors.New("this error means the mocks were finished executing") + var _ = Describe("Event Watcher", func() { var ( delegator *mocks.MockLogDelegator @@ -57,7 +60,8 @@ var _ = Describe("Event Watcher", func() { fakeTransformerTwo.FakeTransformerInitializer, } - eventWatcher.AddTransformers(initializers) + err := eventWatcher.AddTransformers(initializers) + Expect(err).NotTo(HaveOccurred()) }) It("adds initialized transformer to log delegator", func() { @@ -78,114 +82,87 @@ var _ = Describe("Event Watcher", func() { }) Describe("Execute", func() { - var errsChan chan error - - BeforeEach(func() { - errsChan = make(chan error) - }) It("extracts watched logs", func(done Done) { - delegator.DelegateErrors = []error{nil} - delegator.LogsFound = []bool{false} - extractor.ExtractLogsErrors = []error{nil} - extractor.UncheckedHeadersExist = []bool{false} + extractor.ExtractLogsErrors = []error{nil, errExecuteClosed} - go eventWatcher.Execute(constants.HeaderUnchecked, errsChan) + err := eventWatcher.Execute(constants.HeaderUnchecked) - Eventually(func() int { - return extractor.ExtractLogsCount - }).Should(Equal(1)) + Expect(err).To(MatchError(errExecuteClosed)) + Eventually(func() bool { + return extractor.ExtractLogsCount > 0 + }).Should(BeTrue()) close(done) }) It("returns error if extracting logs fails", func(done Done) { - delegator.DelegateErrors = []error{nil} - delegator.LogsFound = []bool{false} extractor.ExtractLogsErrors = []error{fakes.FakeError} - extractor.UncheckedHeadersExist = []bool{false} - go 
eventWatcher.Execute(constants.HeaderUnchecked, errsChan) + err := eventWatcher.Execute(constants.HeaderUnchecked) - Expect(<-errsChan).To(MatchError(fakes.FakeError)) + Expect(err).To(MatchError(fakes.FakeError)) close(done) }) It("extracts watched logs again if missing headers found", func(done Done) { - delegator.DelegateErrors = []error{nil} - delegator.LogsFound = []bool{false} - extractor.ExtractLogsErrors = []error{nil, nil} - extractor.UncheckedHeadersExist = []bool{true, false} + extractor.ExtractLogsErrors = []error{nil, errExecuteClosed} - go eventWatcher.Execute(constants.HeaderUnchecked, errsChan) + err := eventWatcher.Execute(constants.HeaderUnchecked) - Eventually(func() int { - return extractor.ExtractLogsCount - }).Should(Equal(2)) + Expect(err).To(MatchError(errExecuteClosed)) + Eventually(func() bool { + return extractor.ExtractLogsCount > 1 + }).Should(BeTrue()) close(done) }) It("returns error if extracting logs fails on subsequent run", func(done Done) { - delegator.DelegateErrors = []error{nil} - delegator.LogsFound = []bool{false} extractor.ExtractLogsErrors = []error{nil, fakes.FakeError} - extractor.UncheckedHeadersExist = []bool{true, false} - go eventWatcher.Execute(constants.HeaderUnchecked, errsChan) + err := eventWatcher.Execute(constants.HeaderUnchecked) - Expect(<-errsChan).To(MatchError(fakes.FakeError)) + Expect(err).To(MatchError(fakes.FakeError)) close(done) - }) - It("delegates untransformed logs", func(done Done) { - delegator.DelegateErrors = []error{nil} - delegator.LogsFound = []bool{false} - extractor.ExtractLogsErrors = []error{nil} - extractor.UncheckedHeadersExist = []bool{false} + It("delegates untransformed logs", func() { + delegator.DelegateErrors = []error{nil, errExecuteClosed} - go eventWatcher.Execute(constants.HeaderUnchecked, errsChan) + err := eventWatcher.Execute(constants.HeaderUnchecked) - Eventually(func() int { - return delegator.DelegateCallCount - }).Should(Equal(1)) - close(done) + 
Expect(err).To(MatchError(errExecuteClosed)) + Eventually(func() bool { + return delegator.DelegateCallCount > 0 + }).Should(BeTrue()) }) It("returns error if delegating logs fails", func(done Done) { - delegator.LogsFound = []bool{false} delegator.DelegateErrors = []error{fakes.FakeError} - extractor.ExtractLogsErrors = []error{nil} - extractor.UncheckedHeadersExist = []bool{false} - go eventWatcher.Execute(constants.HeaderUnchecked, errsChan) + err := eventWatcher.Execute(constants.HeaderUnchecked) - Expect(<-errsChan).To(MatchError(fakes.FakeError)) + Expect(err).To(MatchError(fakes.FakeError)) close(done) }) It("delegates logs again if untransformed logs found", func(done Done) { - delegator.DelegateErrors = []error{nil, nil} - delegator.LogsFound = []bool{true, false} - extractor.ExtractLogsErrors = []error{nil} - extractor.UncheckedHeadersExist = []bool{false} + delegator.DelegateErrors = []error{nil, nil, nil, errExecuteClosed} - go eventWatcher.Execute(constants.HeaderUnchecked, errsChan) + err := eventWatcher.Execute(constants.HeaderUnchecked) - Eventually(func() int { - return delegator.DelegateCallCount - }).Should(Equal(2)) + Expect(err).To(MatchError(errExecuteClosed)) + Eventually(func() bool { + return delegator.DelegateCallCount > 1 + }).Should(BeTrue()) close(done) }) It("returns error if delegating logs fails on subsequent run", func(done Done) { delegator.DelegateErrors = []error{nil, fakes.FakeError} - delegator.LogsFound = []bool{true, false} - extractor.ExtractLogsErrors = []error{nil} - extractor.UncheckedHeadersExist = []bool{false} - go eventWatcher.Execute(constants.HeaderUnchecked, errsChan) + err := eventWatcher.Execute(constants.HeaderUnchecked) - Expect(<-errsChan).To(MatchError(fakes.FakeError)) + Expect(err).To(MatchError(fakes.FakeError)) close(done) }) }) From f83e996ab85e05ddfe62a3bdea028a33e43f2e6f Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Wed, 18 Sep 2019 21:39:34 -0500 Subject: [PATCH 19/21] Update comments in checked 
headers repository --- .../postgres/repositories/checked_headers_repository.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkg/datastore/postgres/repositories/checked_headers_repository.go b/pkg/datastore/postgres/repositories/checked_headers_repository.go index 8d870461..8eb4767f 100644 --- a/pkg/datastore/postgres/repositories/checked_headers_repository.go +++ b/pkg/datastore/postgres/repositories/checked_headers_repository.go @@ -33,19 +33,19 @@ func NewCheckedHeadersRepository(db *postgres.DB) CheckedHeadersRepository { return CheckedHeadersRepository{db: db} } -// Adds header_id to the checked_headers table, or increment check_count if header_id already present +// Increment check_count for header func (repo CheckedHeadersRepository) MarkHeaderChecked(headerID int64) error { _, err := repo.db.Exec(insertCheckedHeaderQuery, headerID) return err } -// Remove checked_headers rows with block number >= starting block number +// Zero out check count for headers with block number >= startingBlockNumber func (repo CheckedHeadersRepository) MarkHeadersUnchecked(startingBlockNumber int64) error { _, err := repo.db.Exec(`UPDATE public.headers SET check_count = 0 WHERE block_number >= $1`, startingBlockNumber) return err } -// Return header_id if not present in checked_headers or its check_count is < passed checkCount +// Return header if check_count < passed checkCount func (repo CheckedHeadersRepository) UncheckedHeaders(startingBlockNumber, endingBlockNumber, checkCount int64) ([]core.Header, error) { var result []core.Header var query string From b96a1ee1c6b281e71c9b6f8e6c4cb012e3033194 Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Wed, 18 Sep 2019 22:17:02 -0500 Subject: [PATCH 20/21] Put secondary processes to sleep in event watcher tests - Prevent extract/delegate from spinning when other side is being simulated --- libraries/shared/watcher/event_watcher_test.go | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git 
a/libraries/shared/watcher/event_watcher_test.go b/libraries/shared/watcher/event_watcher_test.go index 69f9b5e7..78221db9 100644 --- a/libraries/shared/watcher/event_watcher_test.go +++ b/libraries/shared/watcher/event_watcher_test.go @@ -21,6 +21,7 @@ import ( . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" "github.com/vulcanize/vulcanizedb/libraries/shared/constants" + "github.com/vulcanize/vulcanizedb/libraries/shared/logs" "github.com/vulcanize/vulcanizedb/libraries/shared/mocks" "github.com/vulcanize/vulcanizedb/libraries/shared/transformer" "github.com/vulcanize/vulcanizedb/libraries/shared/watcher" @@ -84,6 +85,7 @@ var _ = Describe("Event Watcher", func() { Describe("Execute", func() { It("extracts watched logs", func(done Done) { + delegator.DelegateErrors = []error{logs.ErrNoLogs} extractor.ExtractLogsErrors = []error{nil, errExecuteClosed} err := eventWatcher.Execute(constants.HeaderUnchecked) @@ -96,6 +98,7 @@ var _ = Describe("Event Watcher", func() { }) It("returns error if extracting logs fails", func(done Done) { + delegator.DelegateErrors = []error{logs.ErrNoLogs} extractor.ExtractLogsErrors = []error{fakes.FakeError} err := eventWatcher.Execute(constants.HeaderUnchecked) @@ -105,6 +108,7 @@ var _ = Describe("Event Watcher", func() { }) It("extracts watched logs again if missing headers found", func(done Done) { + delegator.DelegateErrors = []error{logs.ErrNoLogs} extractor.ExtractLogsErrors = []error{nil, errExecuteClosed} err := eventWatcher.Execute(constants.HeaderUnchecked) @@ -117,6 +121,7 @@ var _ = Describe("Event Watcher", func() { }) It("returns error if extracting logs fails on subsequent run", func(done Done) { + delegator.DelegateErrors = []error{logs.ErrNoLogs} extractor.ExtractLogsErrors = []error{nil, fakes.FakeError} err := eventWatcher.Execute(constants.HeaderUnchecked) @@ -127,6 +132,7 @@ var _ = Describe("Event Watcher", func() { It("delegates untransformed logs", func() { delegator.DelegateErrors = []error{nil, 
errExecuteClosed} + extractor.ExtractLogsErrors = []error{logs.ErrNoUncheckedHeaders} err := eventWatcher.Execute(constants.HeaderUnchecked) @@ -138,6 +144,7 @@ var _ = Describe("Event Watcher", func() { It("returns error if delegating logs fails", func(done Done) { delegator.DelegateErrors = []error{fakes.FakeError} + extractor.ExtractLogsErrors = []error{logs.ErrNoUncheckedHeaders} err := eventWatcher.Execute(constants.HeaderUnchecked) @@ -147,6 +154,7 @@ var _ = Describe("Event Watcher", func() { It("delegates logs again if untransformed logs found", func(done Done) { delegator.DelegateErrors = []error{nil, nil, nil, errExecuteClosed} + extractor.ExtractLogsErrors = []error{logs.ErrNoUncheckedHeaders} err := eventWatcher.Execute(constants.HeaderUnchecked) @@ -159,6 +167,7 @@ var _ = Describe("Event Watcher", func() { It("returns error if delegating logs fails on subsequent run", func(done Done) { delegator.DelegateErrors = []error{nil, fakes.FakeError} + extractor.ExtractLogsErrors = []error{logs.ErrNoUncheckedHeaders} err := eventWatcher.Execute(constants.HeaderUnchecked) From 3897b288af3c2443ce22c47eb88fa0b07caeed70 Mon Sep 17 00:00:00 2001 From: Rob Mulholand Date: Fri, 20 Sep 2019 10:38:16 -0500 Subject: [PATCH 21/21] Update Geth dep to 1.9.5 --- go.mod | 2 +- go.sum | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/go.mod b/go.mod index 3f13c0a6..c7cd332b 100644 --- a/go.mod +++ b/go.mod @@ -9,7 +9,7 @@ require ( github.com/deckarep/golang-set v1.7.1 // indirect github.com/edsrzf/mmap-go v1.0.0 // indirect github.com/elastic/gosigar v0.10.4 // indirect - github.com/ethereum/go-ethereum v1.9.1 + github.com/ethereum/go-ethereum v1.9.5 github.com/fjl/memsize v0.0.0-20190710130421-bcb5799ab5e5 // indirect github.com/gballet/go-libpcsclite v0.0.0-20190607065134-2772fd86a8ff // indirect github.com/gorilla/websocket v1.4.1 // indirect diff --git a/go.sum b/go.sum index fa407d6f..ffc6e746 100644 --- a/go.sum +++ b/go.sum @@ -50,6 +50,8 @@ 
github.com/elastic/gosigar v0.10.4/go.mod h1:cdorVVzy1fhmEqmtgqkoE3bYtCfSCkVyjTy github.com/ethereum/go-ethereum v1.9.0/go.mod h1:PwpWDrCLZrV+tfrhqqF6kPknbISMHaJv9Ln3kPCZLwY= github.com/ethereum/go-ethereum v1.9.1 h1:MrdTRvKIa3apdx6NW1azzSgl8BQB1eTBVSUmFhuztaU= github.com/ethereum/go-ethereum v1.9.1/go.mod h1:PwpWDrCLZrV+tfrhqqF6kPknbISMHaJv9Ln3kPCZLwY= +github.com/ethereum/go-ethereum v1.9.5 h1:4oxsF+/3N/sTgda9XTVG4r+wMVLsveziSMcK83hPbsk= +github.com/ethereum/go-ethereum v1.9.5/go.mod h1:PwpWDrCLZrV+tfrhqqF6kPknbISMHaJv9Ln3kPCZLwY= github.com/fjl/memsize v0.0.0-20190710130421-bcb5799ab5e5 h1:FtmdgXiUlNeRsoNMFlKLDt+S+6hbjVMEW6RGQ7aUf7c= github.com/fjl/memsize v0.0.0-20190710130421-bcb5799ab5e5/go.mod h1:VvhXpOYNQvB+uIk2RvXzuaQtkQJzzIx6lSBe1xv7hi0= github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=