// Copyright 2018 Vulcanize
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package ilk_test

import (
	"database/sql"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"github.com/vulcanize/vulcanizedb/pkg/core"
	"github.com/vulcanize/vulcanizedb/pkg/datastore"
	"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres"
	"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories"
	"github.com/vulcanize/vulcanizedb/pkg/fakes"
	"github.com/vulcanize/vulcanizedb/pkg/transformers/drip_file/ilk"
	"github.com/vulcanize/vulcanizedb/pkg/transformers/test_data"
	"github.com/vulcanize/vulcanizedb/test_config"
)

var _ = Describe("Drip file ilk repository", func() {
	var (
		db                    *postgres.DB
		dripFileIlkRepository ilk.Repository
		err                   error
		headerRepository      datastore.HeaderRepository
	)

	BeforeEach(func() {
		db = test_config.NewTestDB(test_config.NewTestNode())
		test_config.CleanTestDB(db)
		headerRepository = repositories.NewHeaderRepository(db)
		dripFileIlkRepository = ilk.NewDripFileIlkRepository(db)
	})

	Describe("Create", func() {
		var headerID int64

		BeforeEach(func() {
			headerID, err = headerRepository.CreateOrUpdateHeader(fakes.FakeHeader)
			Expect(err).NotTo(HaveOccurred())

			err = dripFileIlkRepository.Create(headerID, []ilk.DripFileIlkModel{test_data.DripFileIlkModel})
			Expect(err).NotTo(HaveOccurred())
		})

		It("adds a drip file ilk event", func() {
			var dbDripFileIlk ilk.DripFileIlkModel
			err = db.Get(&dbDripFileIlk, `SELECT ilk, vow, tax, log_idx, tx_idx, raw_log FROM maker.drip_file_ilk WHERE header_id = $1`, headerID)
			Expect(err).NotTo(HaveOccurred())
			Expect(dbDripFileIlk.Ilk).To(Equal(test_data.DripFileIlkModel.Ilk))
			Expect(dbDripFileIlk.Vow).To(Equal(test_data.DripFileIlkModel.Vow))
			Expect(dbDripFileIlk.Tax).To(Equal(test_data.DripFileIlkModel.Tax))
			Expect(dbDripFileIlk.LogIndex).To(Equal(test_data.DripFileIlkModel.LogIndex))
			Expect(dbDripFileIlk.TransactionIndex).To(Equal(test_data.DripFileIlkModel.TransactionIndex))
			Expect(dbDripFileIlk.Raw).To(MatchJSON(test_data.DripFileIlkModel.Raw))
		})

		It("marks header as checked for logs", func() {
			Expect(err).NotTo(HaveOccurred())
			var headerChecked bool
			err = db.Get(&headerChecked, `SELECT drip_file_ilk_checked FROM public.checked_headers WHERE header_id = $1`, headerID)
			Expect(err).NotTo(HaveOccurred())
			Expect(headerChecked).To(BeTrue())
		})

		It("does not duplicate drip file events", func() {
			err = dripFileIlkRepository.Create(headerID, []ilk.DripFileIlkModel{test_data.DripFileIlkModel})

			Expect(err).To(HaveOccurred())
			Expect(err.Error()).To(ContainSubstring("pq: duplicate key value violates unique constraint"))
		})

		It("removes drip file if corresponding header is deleted", func() {
			_, err = db.Exec(`DELETE FROM headers WHERE id = $1`, headerID)

			Expect(err).NotTo(HaveOccurred())
			var dbDripFileIlk ilk.DripFileIlkModel
			err = db.Get(&dbDripFileIlk, `SELECT ilk, vow, tax, log_idx, tx_idx, raw_log FROM maker.drip_file_ilk WHERE header_id = $1`, headerID)
			Expect(err).To(HaveOccurred())
			Expect(err).To(MatchError(sql.ErrNoRows))
		})
	})

	Describe("MarkHeaderChecked", func() {
		var headerID int64

		BeforeEach(func() {
			headerID, err = headerRepository.CreateOrUpdateHeader(fakes.FakeHeader)
			Expect(err).NotTo(HaveOccurred())
		})

		It("creates a row for a new headerID", func() {
			err = dripFileIlkRepository.MarkHeaderChecked(headerID)

			Expect(err).NotTo(HaveOccurred())
			var headerChecked bool
			err = db.Get(&headerChecked, `SELECT drip_file_ilk_checked FROM public.checked_headers WHERE header_id = $1`, headerID)
			Expect(err).NotTo(HaveOccurred())
			Expect(headerChecked).To(BeTrue())
		})

		It("updates row when headerID already exists", func() {
			_, err = db.Exec(`INSERT INTO public.checked_headers (header_id) VALUES ($1)`, headerID)

			err = dripFileIlkRepository.MarkHeaderChecked(headerID)

			Expect(err).NotTo(HaveOccurred())
			var headerChecked bool
			err = db.Get(&headerChecked, `SELECT drip_file_ilk_checked FROM public.checked_headers WHERE header_id = $1`, headerID)
			Expect(err).NotTo(HaveOccurred())
			Expect(headerChecked).To(BeTrue())
		})
	})

	Describe("MissingHeaders", func() {
		var (
			startingBlock, endingBlock, dripFileBlock int64
			blockNumbers, headerIDs                   []int64
		)

		BeforeEach(func() {
			startingBlock = GinkgoRandomSeed()
			dripFileBlock = startingBlock + 1
			endingBlock = startingBlock + 2

			blockNumbers = []int64{startingBlock, dripFileBlock, endingBlock, endingBlock + 1}

			headerIDs = []int64{}
			for _, n := range blockNumbers {
				headerID, err := headerRepository.CreateOrUpdateHeader(fakes.GetFakeHeader(n))
				Expect(err).NotTo(HaveOccurred())
				headerIDs = append(headerIDs, headerID)
			}
		})

		It("returns headers with no associated drip file event", func() {
			err := dripFileIlkRepository.MarkHeaderChecked(headerIDs[1])
			Expect(err).NotTo(HaveOccurred())

			headers, err := dripFileIlkRepository.MissingHeaders(startingBlock, endingBlock)

			Expect(err).NotTo(HaveOccurred())
			Expect(len(headers)).To(Equal(2))
			Expect(headers[0].BlockNumber).To(Or(Equal(startingBlock), Equal(endingBlock)))
			Expect(headers[1].BlockNumber).To(Or(Equal(startingBlock), Equal(endingBlock)))
		})

		It("only treats headers as checked if drip file ilk logs have been checked", func() {
			_, err := db.Exec(`INSERT INTO public.checked_headers (header_id) VALUES ($1)`, headerIDs[1])
			Expect(err).NotTo(HaveOccurred())

			headers, err := dripFileIlkRepository.MissingHeaders(startingBlock, endingBlock)

			Expect(err).NotTo(HaveOccurred())
			Expect(len(headers)).To(Equal(3))
			Expect(headers[0].BlockNumber).To(Or(Equal(startingBlock), Equal(endingBlock), Equal(dripFileBlock)))
			Expect(headers[1].BlockNumber).To(Or(Equal(startingBlock), Equal(endingBlock), Equal(dripFileBlock)))
			Expect(headers[2].BlockNumber).To(Or(Equal(startingBlock), Equal(endingBlock), Equal(dripFileBlock)))
		})

		It("only returns headers associated with the current node", func() {
			dbTwo := test_config.NewTestDB(core.Node{ID: "second"})
			headerRepositoryTwo := repositories.NewHeaderRepository(dbTwo)
			for _, n := range blockNumbers {
				_, err = headerRepositoryTwo.CreateOrUpdateHeader(fakes.GetFakeHeader(n))
				Expect(err).NotTo(HaveOccurred())
			}
			dripFileIlkRepositoryTwo := ilk.NewDripFileIlkRepository(dbTwo)
			err := dripFileIlkRepository.MarkHeaderChecked(headerIDs[0])
			Expect(err).NotTo(HaveOccurred())

			nodeOneMissingHeaders, err := dripFileIlkRepository.MissingHeaders(blockNumbers[0], blockNumbers[len(blockNumbers)-1])
			Expect(err).NotTo(HaveOccurred())
			Expect(len(nodeOneMissingHeaders)).To(Equal(len(blockNumbers) - 1))

			nodeTwoMissingHeaders, err := dripFileIlkRepositoryTwo.MissingHeaders(blockNumbers[0], blockNumbers[len(blockNumbers)-1])
			Expect(err).NotTo(HaveOccurred())
			Expect(len(nodeTwoMissingHeaders)).To(Equal(len(blockNumbers)))
		})
	})
})