// VulcanizeDB
// Copyright © 2018 Vulcanize

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.

// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.

package repository

import (
	"database/sql"
	"fmt"

	"github.com/hashicorp/golang-lru"

	"github.com/vulcanize/vulcanizedb/pkg/core"
	"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres"
)

const columnCacheSize = 1000
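
// HeaderRepository tracks which headers have been checked for which event or
// method ids. Each id corresponds to an integer column on the
// public.checked_headers table: 0 means unchecked, and a positive value counts
// how many times the header has been marked checked for that id.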
type HeaderRepository interface {
	AddCheckColumn(id string) error
	AddCheckColumns(ids []string) error
	MarkHeaderChecked(headerID int64, eventID string) error
	MarkHeaderCheckedForAll(headerID int64, ids []string) error
	MarkHeadersCheckedForAll(headers []core.Header, ids []string) error
	MissingHeaders(startingBlockNumber int64, endingBlockNumber int64, eventID string) ([]core.Header, error)
	MissingMethodsCheckedEventsIntersection(startingBlockNumber, endingBlockNumber int64, methodIds, eventIds []string) ([]core.Header, error)
	MissingHeadersForAll(startingBlockNumber, endingBlockNumber int64, ids []string) ([]core.Header, error)
	CheckCache(key string) (interface{}, bool)
}

type headerRepository struct {
	db      *postgres.DB
	columns *lru.Cache // Cache created columns to minimize db connections
}
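
// NewHeaderRepository creates a header repository backed by the given Postgres
// connection, with an LRU cache of recently added checked_headers columns.
//
// A minimal usage sketch (the column id and surrounding setup are illustrative
// assumptions, not fixed by this package):
//
//	repo := NewHeaderRepository(db) // db is a *postgres.DB obtained elsewhere
//	if err := repo.AddCheckColumn("example_event_checked"); err != nil {
//		// handle error
//	}
//	headers, err := repo.MissingHeaders(0, -1, "example_event_checked")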
func NewHeaderRepository(db *postgres.DB) *headerRepository {
	ccs, _ := lru.New(columnCacheSize)
	return &headerRepository{
		db:      db,
		columns: ccs,
	}
}
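
// AddCheckColumn adds an integer column named after id to public.checked_headers
// (if it does not already exist) and caches the column name so repeated calls
// skip the database round trip. For a hypothetical id "example_event_checked",
// the executed statement is:
//
//	ALTER TABLE public.checked_headers ADD COLUMN IF NOT EXISTS example_event_checked INTEGER NOT NULL DEFAULT 0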
func (r *headerRepository) AddCheckColumn(id string) error {
	// Check cache to see if column already exists before querying pg
	_, ok := r.columns.Get(id)
	if ok {
		return nil
	}

	pgStr := "ALTER TABLE public.checked_headers ADD COLUMN IF NOT EXISTS "
	pgStr = pgStr + id + " INTEGER NOT NULL DEFAULT 0"
	_, err := r.db.Exec(pgStr)
	if err != nil {
		return err
	}

	// Add column name to cache
	r.columns.Add(id, true)

	return nil
}
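
// AddCheckColumns adds a column for every id not already in the cache, using a
// single ALTER TABLE statement; the ids are cached only if the statement
// succeeds. For hypothetical ids "event_a" and "event_b", the generated SQL is
// roughly:
//
//	ALTER TABLE public.checked_headers
//		ADD COLUMN IF NOT EXISTS event_a INTEGER NOT NULL DEFAULT 0,
//		ADD COLUMN IF NOT EXISTS event_b INTEGER NOT NULL DEFAULT 0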
func (r *headerRepository) AddCheckColumns(ids []string) error {
	var err error
	baseQuery := "ALTER TABLE public.checked_headers"
	input := make([]string, 0, len(ids))
	for _, id := range ids {
		_, ok := r.columns.Get(id)
		if !ok {
			baseQuery += " ADD COLUMN IF NOT EXISTS " + id + " INTEGER NOT NULL DEFAULT 0,"
			input = append(input, id)
		}
	}

	if len(input) > 0 {
		_, err = r.db.Exec(baseQuery[:len(baseQuery)-1])
		if err == nil {
			for _, id := range input {
				r.columns.Add(id, true)
			}
		}
	}

	return err
}
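
// MarkHeaderChecked upserts a checked_headers row for the header: the id column
// is set to 1 on first insert and incremented on each subsequent call.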
func (r *headerRepository) MarkHeaderChecked(headerID int64, id string) error {
	_, err := r.db.Exec(`INSERT INTO public.checked_headers (header_id, `+id+`)
		VALUES ($1, $2)
		ON CONFLICT (header_id) DO
			UPDATE SET `+id+` = checked_headers.`+id+` + 1`, headerID, 1)
	return err
}
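
// MarkHeaderCheckedForAll upserts a single checked_headers row covering every id
// at once. For hypothetical ids "event_a" and "event_b", the generated SQL is
// roughly:
//
//	INSERT INTO public.checked_headers (header_id, event_a, event_b) VALUES ($1, 1, 1)
//	ON CONFLICT (header_id) DO UPDATE SET event_a = checked_headers.event_a + 1, event_b = checked_headers.event_b + 1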
func (r *headerRepository) MarkHeaderCheckedForAll(headerID int64, ids []string) error {
	pgStr := "INSERT INTO public.checked_headers (header_id, "
	for _, id := range ids {
		pgStr += id + ", "
	}
	pgStr = pgStr[:len(pgStr)-2] + ") VALUES ($1, "
	for i := 0; i < len(ids); i++ {
		pgStr += "1, "
	}
	pgStr = pgStr[:len(pgStr)-2] + ") ON CONFLICT (header_id) DO UPDATE SET "
	for _, id := range ids {
		pgStr += id + `= checked_headers.` + id + ` + 1, `
	}
	pgStr = pgStr[:len(pgStr)-2]
	_, err := r.db.Exec(pgStr, headerID)
	return err
}
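
// MarkHeadersCheckedForAll performs the same per-header upsert as
// MarkHeaderCheckedForAll for every header, inside one transaction; the
// transaction is rolled back and the error returned on the first failure.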
func (r *headerRepository) MarkHeadersCheckedForAll(headers []core.Header, ids []string) error {
	tx, err := r.db.Begin()
	if err != nil {
		return err
	}

	for _, header := range headers {
		pgStr := "INSERT INTO public.checked_headers (header_id, "
		for _, id := range ids {
			pgStr += id + ", "
		}
		pgStr = pgStr[:len(pgStr)-2] + ") VALUES ($1, "
		for i := 0; i < len(ids); i++ {
			pgStr += "1, "
		}
		pgStr = pgStr[:len(pgStr)-2] + ") ON CONFLICT (header_id) DO UPDATE SET "
		for _, id := range ids {
			pgStr += fmt.Sprintf("%s = checked_headers.%s + 1, ", id, id)
		}
		pgStr = pgStr[:len(pgStr)-2]
		_, err = tx.Exec(pgStr, header.Id)
		if err != nil {
			tx.Rollback()
			return err
		}
	}

	return tx.Commit()
}
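
// MissingHeaders returns headers in the given block range that have either no
// checked_headers row or a 0 in the id column, restricted to headers recorded by
// this node's fingerprint. An endingBlockNumber of -1 means "no upper bound".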
func (r *headerRepository) MissingHeaders(startingBlockNumber, endingBlockNumber int64, id string) ([]core.Header, error) {
	var result []core.Header
	var query string
	var err error

	if endingBlockNumber == -1 {
		query = `SELECT headers.id, headers.block_number, headers.hash FROM headers
			LEFT JOIN checked_headers on headers.id = header_id
			WHERE (header_id ISNULL OR checked_headers.` + id + `=0)
			AND headers.block_number >= $1
			AND headers.eth_node_fingerprint = $2
			ORDER BY headers.block_number`
		err = r.db.Select(&result, query, startingBlockNumber, r.db.Node.ID)
	} else {
		query = `SELECT headers.id, headers.block_number, headers.hash FROM headers
			LEFT JOIN checked_headers on headers.id = header_id
			WHERE (header_id ISNULL OR checked_headers.` + id + `=0)
			AND headers.block_number >= $1
			AND headers.block_number <= $2
			AND headers.eth_node_fingerprint = $3
			ORDER BY headers.block_number`
		err = r.db.Select(&result, query, startingBlockNumber, endingBlockNumber, r.db.Node.ID)
	}

	return result, err
}
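
// MissingHeadersForAll returns headers in the range that are unchecked for at
// least one of the given ids (no checked_headers row, or any id column still 0).
// For hypothetical ids "event_a" and "event_b", the WHERE clause is roughly:
//
//	WHERE (header_id ISNULL OR checked_headers.event_a = 0 OR checked_headers.event_b = 0)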
func (r *headerRepository) MissingHeadersForAll(startingBlockNumber, endingBlockNumber int64, ids []string) ([]core.Header, error) {
	var result []core.Header
	var query string
	var err error

	baseQuery := `SELECT headers.id, headers.block_number, headers.hash FROM headers
		LEFT JOIN checked_headers on headers.id = header_id
		WHERE (header_id ISNULL`
	for _, id := range ids {
		baseQuery += ` OR checked_headers.` + id + `= 0`
	}

	if endingBlockNumber == -1 {
		endStr := `) AND headers.block_number >= $1
			AND headers.eth_node_fingerprint = $2
			ORDER BY headers.block_number`
		query = baseQuery + endStr
		err = r.db.Select(&result, query, startingBlockNumber, r.db.Node.ID)
	} else {
		endStr := `) AND headers.block_number >= $1
			AND headers.block_number <= $2
			AND headers.eth_node_fingerprint = $3
			ORDER BY headers.block_number`
		query = baseQuery + endStr
		err = r.db.Select(&result, query, startingBlockNumber, endingBlockNumber, r.db.Node.ID)
	}

	return result, err
}
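
// MissingMethodsCheckedEventsIntersection returns headers in the range that have
// already been checked for every event id but not yet checked for any method id.
// For hypothetical event and method ids, the WHERE clause is roughly:
//
//	WHERE (header_id IS NOT NULL AND event_a != 0 AND event_b != 0)
//	AND (method_a = 0 AND method_b = 0)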
func (r *headerRepository) MissingMethodsCheckedEventsIntersection(startingBlockNumber, endingBlockNumber int64, methodIds, eventIds []string) ([]core.Header, error) {
	var result []core.Header
	var query string
	var err error

	baseQuery := `SELECT headers.id, headers.block_number, headers.hash FROM headers
		LEFT JOIN checked_headers on headers.id = header_id
		WHERE (header_id IS NOT NULL`
	for _, id := range eventIds {
		baseQuery += ` AND ` + id + `!=0`
	}
	baseQuery += `) AND (`
	for _, id := range methodIds {
		baseQuery += id + ` =0 AND `
	}
	baseQuery = baseQuery[:len(baseQuery)-5] + `) `

	if endingBlockNumber == -1 {
		endStr := `AND headers.block_number >= $1
			AND headers.eth_node_fingerprint = $2
			ORDER BY headers.block_number`
		query = baseQuery + endStr
		err = r.db.Select(&result, query, startingBlockNumber, r.db.Node.ID)
	} else {
		endStr := `AND headers.block_number >= $1
			AND headers.block_number <= $2
			AND headers.eth_node_fingerprint = $3
			ORDER BY headers.block_number`
		query = baseQuery + endStr
		err = r.db.Select(&result, query, startingBlockNumber, endingBlockNumber, r.db.Node.ID)
	}

	return result, err
}
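
// CheckCache reports whether a column id is present in the LRU cache and returns
// the cached value.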
func (r *headerRepository) CheckCache(key string) (interface{}, bool) {
	return r.columns.Get(key)
}
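
// MarkHeaderCheckedInTransaction is the equivalent of MarkHeaderChecked for use
// inside an existing sql.Tx, so a caller can mark a header checked atomically
// with its other writes.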
func MarkHeaderCheckedInTransaction(headerID int64, tx *sql.Tx, eventID string) error {
	_, err := tx.Exec(`INSERT INTO public.checked_headers (header_id, `+eventID+`)
		VALUES ($1, $2)
		ON CONFLICT (header_id) DO
			UPDATE SET `+eventID+` = checked_headers.`+eventID+` + 1`, headerID, 1)
	return err
}