// Copyright 2018 Vulcanize
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package ilk_test

import (
	"database/sql"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"github.com/vulcanize/vulcanizedb/pkg/core"
	"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres"
	"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories"
	"github.com/vulcanize/vulcanizedb/pkg/transformers/pit_file/ilk"
	"github.com/vulcanize/vulcanizedb/pkg/transformers/test_data"
	"github.com/vulcanize/vulcanizedb/test_config"
)
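
// These specs exercise ilk.Repository against a live test Postgres database:
// Create persists pit file ilk events, MarkHeaderChecked flags a header in
// public.checked_headers, and MissingHeaders reports headers in a block range
// that have not yet been checked for this node.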
var _ = Describe("Pit file ilk repository", func() {
	Describe("Create", func() {
		var (
			db                   *postgres.DB
			pitFileIlkRepository ilk.Repository
			err                  error
			headerID             int64
		)

		BeforeEach(func() {
			db = test_config.NewTestDB(core.Node{})
			test_config.CleanTestDB(db)
			headerRepository := repositories.NewHeaderRepository(db)
			headerID, err = headerRepository.CreateOrUpdateHeader(core.Header{})
			Expect(err).NotTo(HaveOccurred())
			pitFileIlkRepository = ilk.NewPitFileIlkRepository(db)
		})

		It("adds a pit file ilk event", func() {
			err = pitFileIlkRepository.Create(headerID, []ilk.PitFileIlkModel{test_data.PitFileIlkModel})

			Expect(err).NotTo(HaveOccurred())
			var dbPitFile ilk.PitFileIlkModel
			err = db.Get(&dbPitFile, `SELECT ilk, what, data, tx_idx, raw_log FROM maker.pit_file_ilk WHERE header_id = $1`, headerID)
			Expect(err).NotTo(HaveOccurred())
			Expect(dbPitFile.Ilk).To(Equal(test_data.PitFileIlkModel.Ilk))
			Expect(dbPitFile.What).To(Equal(test_data.PitFileIlkModel.What))
			Expect(dbPitFile.Data).To(Equal(test_data.PitFileIlkModel.Data))
			Expect(dbPitFile.TransactionIndex).To(Equal(test_data.PitFileIlkModel.TransactionIndex))
			Expect(dbPitFile.Raw).To(MatchJSON(test_data.PitFileIlkModel.Raw))
		})
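
		// Create should also flag the header in public.checked_headers so the
		// same header is not re-checked for pit file ilk logs later.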
		It("marks header as checked for logs", func() {
			err = pitFileIlkRepository.Create(headerID, []ilk.PitFileIlkModel{test_data.PitFileIlkModel})

			Expect(err).NotTo(HaveOccurred())
			var headerChecked bool
			err = db.Get(&headerChecked, `SELECT pit_file_ilk_checked FROM public.checked_headers WHERE header_id = $1`, headerID)
			Expect(err).NotTo(HaveOccurred())
			Expect(headerChecked).To(BeTrue())
		})
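
		// A second Create with the same event data for the same header should
		// be rejected by the table's unique constraint rather than silently
		// inserting a duplicate row.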
		It("does not duplicate pit file ilk events", func() {
			err = pitFileIlkRepository.Create(headerID, []ilk.PitFileIlkModel{test_data.PitFileIlkModel})
			Expect(err).NotTo(HaveOccurred())

			err = pitFileIlkRepository.Create(headerID, []ilk.PitFileIlkModel{test_data.PitFileIlkModel})

			Expect(err).To(HaveOccurred())
			Expect(err.Error()).To(ContainSubstring("pq: duplicate key value violates unique constraint"))
		})
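
		// Deleting the parent header should remove the associated event row,
		// so the follow-up query is expected to return sql.ErrNoRows.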
		It("removes pit file ilk if corresponding header is deleted", func() {
			err = pitFileIlkRepository.Create(headerID, []ilk.PitFileIlkModel{test_data.PitFileIlkModel})
			Expect(err).NotTo(HaveOccurred())

			_, err = db.Exec(`DELETE FROM headers WHERE id = $1`, headerID)

			Expect(err).NotTo(HaveOccurred())
			var dbPitFile ilk.PitFileIlkModel
			err = db.Get(&dbPitFile, `SELECT ilk, what, data, tx_idx, raw_log FROM maker.pit_file_ilk WHERE header_id = $1`, headerID)
			Expect(err).To(HaveOccurred())
			Expect(err).To(MatchError(sql.ErrNoRows))
		})
	})
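
	// MarkHeaderChecked should behave like an upsert on public.checked_headers:
	// it inserts a row for a headerID it has not seen and sets the
	// pit_file_ilk_checked flag on a row that already exists.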
	Describe("MarkHeaderChecked", func() {
		var (
			db                   *postgres.DB
			pitFileIlkRepository ilk.Repository
			err                  error
			headerID             int64
		)

		BeforeEach(func() {
			db = test_config.NewTestDB(core.Node{})
			test_config.CleanTestDB(db)
			headerRepository := repositories.NewHeaderRepository(db)
			headerID, err = headerRepository.CreateOrUpdateHeader(core.Header{})
			Expect(err).NotTo(HaveOccurred())
			pitFileIlkRepository = ilk.NewPitFileIlkRepository(db)
		})

		It("creates a row for a new headerID", func() {
			err = pitFileIlkRepository.MarkHeaderChecked(headerID)

			Expect(err).NotTo(HaveOccurred())
			var headerChecked bool
			err = db.Get(&headerChecked, `SELECT pit_file_ilk_checked FROM public.checked_headers WHERE header_id = $1`, headerID)
			Expect(err).NotTo(HaveOccurred())
			Expect(headerChecked).To(BeTrue())
		})

		It("updates row when headerID already exists", func() {
			_, err = db.Exec(`INSERT INTO public.checked_headers (header_id) VALUES ($1)`, headerID)

			err = pitFileIlkRepository.MarkHeaderChecked(headerID)

			Expect(err).NotTo(HaveOccurred())
			var headerChecked bool
			err = db.Get(&headerChecked, `SELECT pit_file_ilk_checked FROM public.checked_headers WHERE header_id = $1`, headerID)
			Expect(err).NotTo(HaveOccurred())
			Expect(headerChecked).To(BeTrue())
		})
	})
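
	// MissingHeaders should only count a header as checked once its
	// pit_file_ilk_checked flag is set, and should only look at headers
	// synced by the current node.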
	Describe("MissingHeaders", func() {
		It("returns headers that haven't been checked", func() {
			db := test_config.NewTestDB(core.Node{})
			test_config.CleanTestDB(db)
			headerRepository := repositories.NewHeaderRepository(db)
			startingBlockNumber := int64(1)
			pitFileBlockNumber := int64(2)
			endingBlockNumber := int64(3)
			blockNumbers := []int64{startingBlockNumber, pitFileBlockNumber, endingBlockNumber, endingBlockNumber + 1}
			var headerIDs []int64
			for _, n := range blockNumbers {
				headerID, err := headerRepository.CreateOrUpdateHeader(core.Header{BlockNumber: n})
				headerIDs = append(headerIDs, headerID)
				Expect(err).NotTo(HaveOccurred())
			}
			pitFileRepository := ilk.NewPitFileIlkRepository(db)
			err := pitFileRepository.MarkHeaderChecked(headerIDs[1])
			Expect(err).NotTo(HaveOccurred())

			headers, err := pitFileRepository.MissingHeaders(startingBlockNumber, endingBlockNumber)

			Expect(err).NotTo(HaveOccurred())
			Expect(len(headers)).To(Equal(2))
			Expect(headers[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber)))
			Expect(headers[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber)))
		})
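
		// A bare row in checked_headers is not enough: the header only counts
		// as checked once its pit_file_ilk_checked column has been set.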
		It("only treats headers as checked if pit file ilk logs have been checked", func() {
			db := test_config.NewTestDB(core.Node{})
			test_config.CleanTestDB(db)
			headerRepository := repositories.NewHeaderRepository(db)
			startingBlockNumber := int64(1)
			pitFileIlkBlockNumber := int64(2)
			endingBlockNumber := int64(3)
			blockNumbers := []int64{startingBlockNumber, pitFileIlkBlockNumber, endingBlockNumber, endingBlockNumber + 1}
			var headerIDs []int64
			for _, n := range blockNumbers {
				headerID, err := headerRepository.CreateOrUpdateHeader(core.Header{BlockNumber: n})
				headerIDs = append(headerIDs, headerID)
				Expect(err).NotTo(HaveOccurred())
			}
			pitFileIlkRepository := ilk.NewPitFileIlkRepository(db)
			_, err := db.Exec(`INSERT INTO public.checked_headers (header_id) VALUES ($1)`, headerIDs[1])
			Expect(err).NotTo(HaveOccurred())

			headers, err := pitFileIlkRepository.MissingHeaders(startingBlockNumber, endingBlockNumber)

			Expect(err).NotTo(HaveOccurred())
			Expect(len(headers)).To(Equal(3))
			Expect(headers[0].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(pitFileIlkBlockNumber)))
			Expect(headers[1].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(pitFileIlkBlockNumber)))
			Expect(headers[2].BlockNumber).To(Or(Equal(startingBlockNumber), Equal(endingBlockNumber), Equal(pitFileIlkBlockNumber)))
		})
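
		// Headers created through a second node's connection should not leak
		// into this node's results: marking a header checked for node one
		// leaves node two's missing headers untouched.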
		It("only returns headers associated with the current node", func() {
			db := test_config.NewTestDB(core.Node{})
			test_config.CleanTestDB(db)
			blockNumbers := []int64{1, 2, 3}
			headerRepository := repositories.NewHeaderRepository(db)
			dbTwo := test_config.NewTestDB(core.Node{ID: "second"})
			headerRepositoryTwo := repositories.NewHeaderRepository(dbTwo)
			var headerIDs []int64
			for _, n := range blockNumbers {
				headerID, err := headerRepository.CreateOrUpdateHeader(core.Header{BlockNumber: n})
				Expect(err).NotTo(HaveOccurred())
				headerIDs = append(headerIDs, headerID)
				_, err = headerRepositoryTwo.CreateOrUpdateHeader(core.Header{BlockNumber: n})
				Expect(err).NotTo(HaveOccurred())
			}
			pitFileRepository := ilk.NewPitFileIlkRepository(db)
			pitFileRepositoryTwo := ilk.NewPitFileIlkRepository(dbTwo)
			err := pitFileRepository.MarkHeaderChecked(headerIDs[0])
			Expect(err).NotTo(HaveOccurred())

			nodeOneMissingHeaders, err := pitFileRepository.MissingHeaders(blockNumbers[0], blockNumbers[len(blockNumbers)-1])
			Expect(err).NotTo(HaveOccurred())
			Expect(len(nodeOneMissingHeaders)).To(Equal(len(blockNumbers) - 1))

			nodeTwoMissingHeaders, err := pitFileRepositoryTwo.MissingHeaders(blockNumbers[0], blockNumbers[len(blockNumbers)-1])
			Expect(err).NotTo(HaveOccurred())
			Expect(len(nodeTwoMissingHeaders)).To(Equal(len(blockNumbers)))
		})
	})
})