Compare commits: 7b47871269 ... b91e854974 (74 commits)

Commits in range (abbreviated SHAs):
b91e854974, 7605fb0358, cd82c21eae, 65351d7ee5, 6e4cb43e0a, f15ec1cd07, eb56eee54e, 31588ddb91,
18a7f23173, ef1846f58c, 2eaa2c2262, bfb0447710, 2c08f5594c, 253b1087bf, 8a3b6bf2ac, b221bde694,
2c41537636, d83b088c37, aca78f89b7, 1788b899a4, 1fe7a04af0, 68ebdca6f9, 5f7915649d, f6df15cb38,
03517a0eb4, 54205d8787, 8d8ff99d19, 3054063942, 54e181ca68, 269333bb17, db532467cc, ccdf9d91fc,
aea3decebf, 2db16d69da, e3d694e63c, 2c0f3456f5, 46cd8b1834, c0cd87ba6a, 2db235f244, a827c4a36b,
14b9c169bc, a2772762e1, b1440d9673, 67d8bced4f, 12f4810ced, 7a8d38c955, d09cd0afe6, bcca82eaa3,
dad77b561d, c939822a95, 6d103cb1f1, 99f84b6fe6, 5b7f5feb1b, 7f8885f044, d235f3b84c, 1fdb8763ac,
cd5aee30c7, 040638ca05, 681e656034, 85896f91b7, 52c7f84432, aa6ee578f4, 1f898f60c3, 0c56037e1f,
9aa683442e, adf3dd4b6f, 5ea4b6766a, 2c4fd6f099, f964b53fe3, b8dad6a09b, 1dc90d0417, 784ffb8726,
1ddffe65be, cdcc3df9f2
File: .dockerignore (new file, 1 line)

.git
File: .gitea/workflows/publish.yml (new file, 28 lines)

name: Publish Docker image

on:
  release:
    types: [published]

jobs:
  docker-build:
    name: Run docker build
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - id: vars
        name: Output SHA and version tag
        run: |
          echo "sha=${GITHUB_SHA:0:7}" >> $GITHUB_OUTPUT
          echo "tag=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
      - name: Build and tag image
        run: |
          docker build . \
            -t cerc-io/eth-statediff-service \
            -t git.vdb.to/cerc-io/eth-statediff-service/eth-statediff-service:${{steps.vars.outputs.sha}} \
            -t git.vdb.to/cerc-io/eth-statediff-service/eth-statediff-service:${{steps.vars.outputs.tag}}
      - name: Push image tags
        run: |
          echo ${{ secrets.GITEA_PUBLISH_TOKEN }} | docker login https://git.vdb.to -u cerccicd --password-stdin
          docker push git.vdb.to/cerc-io/eth-statediff-service/eth-statediff-service:${{steps.vars.outputs.sha}}
          docker push git.vdb.to/cerc-io/eth-statediff-service/eth-statediff-service:${{steps.vars.outputs.tag}}
File: .gitea/workflows/tests.yml (new file, 65 lines)

name: Tests

on:
  pull_request:
    branches: '*'
  push:
    branches:
      - main
      - ci-test
  workflow_call:

# Needed until we can incorporate docker startup into the executor container
env:
  DOCKER_HOST: unix:///var/run/dind.sock

jobs:
  integration-tests:
    name: Run integration tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-go@v3
        with:
          go-version-file: go.mod
          check-latest: true
      - name: Run dockerd
        run: |
          dockerd -H $DOCKER_HOST --userland-proxy=false &
          sleep 5
      - name: Run DB container
        run: docker compose -f test/compose.yml up --wait
      - name: Configure Gitea access
        env:
          TOKEN: ${{ secrets.CICD_REPO_TOKEN }}
        run: |
          git config --global url."https://$TOKEN:@git.vdb.to/".insteadOf "https://git.vdb.to/"
      - name: Build package
        run: go build .
      - name: Run server
        env:
          DATABASE_TYPE: postgres
          LEVELDB_PATH: ./fixture/chaindata
          LEVELDB_ANCIENT: ./fixture/chaindata/ancient
          LOG_FILE_PATH: ./server-log
        run: |
          ./eth-statediff-service --config ./test/ci-config.toml serve &

      # Run a sanity test against the fixture data
      # Complete integration tests are TODO
      - name: Run test
        run: |
          ./scripts/request-range.sh 0 32 || (E=$?; cat ./server-log; exit $E)

          until grep "Finished processing block 32" ./server-log
          do sleep 1; done

          count_results() {
            query="select count(*) from $1;"
            docker exec -e PGPASSWORD=password test-ipld-eth-db-1 \
              psql -tA cerc_testing -U vdbm -c "$query"
          }
          set -x
          [[ "$(count_results eth.header_cids)" = 33 ]]
          [[ "$(count_results eth.state_cids)" = 21 ]]
          [[ "$(count_results eth.storage_cids)" = 18 ]]
File: .github/workflows/issues-notion-sync.yml (deleted, 29 lines)

name: Notion Sync

on:
  workflow_dispatch:
  issues:
    types:
      [
        opened,
        edited,
        labeled,
        unlabeled,
        assigned,
        unassigned,
        milestoned,
        demilestoned,
        reopened,
        closed,
      ]

jobs:
  notion_job:
    runs-on: ubuntu-latest
    name: Add GitHub Issues to Notion
    steps:
      - name: Add GitHub Issues to Notion
        uses: vulcanize/notion-github-action@v1.2.4-issueid
        with:
          notion-token: ${{ secrets.NOTION_TOKEN }}
          notion-db: ${{ secrets.NOTION_DATABASE }}
File: .github/workflows/on-pr.yaml (deleted, 7 lines)

name: Docker Build

on: [pull_request]

jobs:
  run-tests:
    uses: ./.github/workflows/tests.yml
File: .github/workflows/publish.yaml (deleted, 44 lines)

name: Publish Docker image
on:
  release:
    types: [published]
jobs:
  run-tests:
    uses: ./.github/workflows/tests.yml
  build:
    name: Run docker build
    runs-on: ubuntu-latest
    needs: run-tests
    steps:
      - uses: actions/checkout@v2
      - name: Get the version
        id: vars
        run: echo ::set-output name=sha::$(echo ${GITHUB_SHA:0:7})
      - name: Run docker build
        run: make docker-build
      - name: Tag docker image
        run: docker tag vulcanize/eth-statediff-service docker.pkg.github.com/vulcanize/eth-statediff-service/eth-statediff-service:${{steps.vars.outputs.sha}}
      - name: Docker Login
        run: echo ${{ secrets.GITHUB_TOKEN }} | docker login https://docker.pkg.github.com -u vulcanize --password-stdin
      - name: Docker Push
        run: docker push docker.pkg.github.com/vulcanize/eth-statediff-service/eth-statediff-service:${{steps.vars.outputs.sha}}
  push_to_registries:
    name: Push Docker image to Docker Hub
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: Get the version
        id: vars
        run: |
          echo ::set-output name=sha::$(echo ${GITHUB_SHA:0:7})
          echo ::set-output name=tag::$(echo ${GITHUB_REF#refs/tags/})
      - name: Docker Login to Github Registry
        run: echo ${{ secrets.GITHUB_TOKEN }} | docker login https://docker.pkg.github.com -u vulcanize --password-stdin
      - name: Docker Pull
        run: docker pull docker.pkg.github.com/vulcanize/eth-statediff-service/eth-statediff-service:${{steps.vars.outputs.sha}}
      - name: Docker Login to Docker Registry
        run: echo ${{ secrets.VULCANIZEJENKINS_PAT }} | docker login -u vulcanizejenkins --password-stdin
      - name: Tag docker image
        run: docker tag docker.pkg.github.com/vulcanize/eth-statediff-service/eth-statediff-service:${{steps.vars.outputs.sha}} vulcanize/eth-statediff-service:${{steps.vars.outputs.tag}}
      - name: Docker Push to Docker Hub
        run: docker push vulcanize/eth-statediff-service:${{steps.vars.outputs.tag}}
File: .github/workflows/tests.yml (deleted, 37 lines)

name: Tests for Geth that are used in multiple jobs.

on:
  workflow_call:

env:
  GOPATH: /tmp/go

jobs:
  build:
    name: Run docker build
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Run docker build
        run: make docker-build

  statediff-unit-test:
    name: Run statediff unit tests
    runs-on: ubuntu-latest
    env:
      GO111MODULE: on
    steps:
      - name: Create GOPATH
        run: mkdir -p /tmp/go

      - uses: actions/setup-go@v3
        with:
          go-version: ">=1.18.0"
          check-latest: true

      - name: Checkout code
        uses: actions/checkout@v2

      - name: Run unit tests
        run: |
          make test
File: .gitignore (modified)

@@ -1,2 +1,3 @@
 .idea/
 eth-statediff-service
+.vscode
File: Dockerfile (modified)

@@ -1,21 +1,24 @@
-FROM golang:1.18-alpine as builder
+FROM golang:1.19-alpine as builder
 
-RUN apk --update --no-cache add make git g++ linux-headers
+RUN apk add --no-cache git gcc musl-dev binutils-gold
 # DEBUG
 RUN apk add busybox-extras
 
-# Get and build ipfs-blockchain-watcher
-ADD . /go/src/github.com/vulcanize/eth-statediff-service
-#RUN git clone https://github.com/vulcanize/eth-statediff-service.git /go/src/github.com/vulcanize/eth-statediff-service
+WORKDIR /eth-statediff-service
 
-WORKDIR /go/src/github.com/vulcanize/eth-statediff-service
-RUN GO111MODULE=on CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -ldflags '-extldflags "-static"' -o eth-statediff-service .
+ARG GIT_VDBTO_TOKEN
+
+COPY go.mod go.sum ./
+RUN if [ -n "$GIT_VDBTO_TOKEN" ]; then git config --global url."https://$GIT_VDBTO_TOKEN:@git.vdb.to/".insteadOf "https://git.vdb.to/"; fi && \
+    go mod download && \
+    rm -f ~/.gitconfig
+COPY . .
+
+RUN go build -ldflags '-extldflags "-static"' -o eth-statediff-service .
 
 # app container
 FROM alpine
 
 ARG USER="vdbm"
-ARG CONFIG_FILE="./environments/docker.toml"
 ARG EXPOSE_PORT=8545
 
 RUN adduser -Du 5000 $USER adm
@@ -27,12 +30,11 @@ USER $USER
 
 # chown first so dir is writable
 # note: using $USER is merged, but not in the stable release yet
-COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/eth-statediff-service/$CONFIG_FILE config.toml
-COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/eth-statediff-service/startup_script.sh .
-COPY --chown=5000:5000 --from=builder /go/src/github.com/vulcanize/eth-statediff-service/environments environments
+COPY --chown=5000:5000 --from=builder /eth-statediff-service/startup_script.sh .
+COPY --chown=5000:5000 --from=builder /eth-statediff-service/environments environments
 
 # keep binaries immutable
-COPY --from=builder /go/src/github.com/vulcanize/eth-statediff-service/eth-statediff-service eth-statediff-service
+COPY --from=builder /eth-statediff-service/eth-statediff-service eth-statediff-service
 
 EXPOSE $EXPOSE_PORT
 
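The builder stage now accepts an optional GIT_VDBTO_TOKEN build argument so private Go modules on git.vdb.to can be fetched during `go mod download`. A minimal local build using it might look like the following sketch (illustrative only; the image name is taken from the publish workflow above, and the token value is a placeholder):

```bash
# Pass the token only if private modules under git.vdb.to are needed
docker build \
  --build-arg GIT_VDBTO_TOKEN=<token> \
  -t cerc-io/eth-statediff-service .
```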
File: README.md (modified)

@@ -1,17 +1,25 @@
 # eth-statediff-service
 
-[![Go Report Card](https://goreportcard.com/badge/github.com/vulcanize/eth-statediff-service)](https://goreportcard.com/report/github.com/vulcanize/eth-statediff-service)
+[![Go Report Card](https://goreportcard.com/badge/github.com/cerc-io/eth-statediff-service)](https://goreportcard.com/report/github.com/cerc-io/eth-statediff-service)
 
->> standalone statediffing service ontop of LevelDB
+A standalone statediffing service which runs directly on top of a `go-ethereum` LevelDB instance.
 
-Purpose:
-
-Stand up a statediffing service directly on top of a go-ethereum LevelDB instance.
 This service can serve historical state data over the same rpc interface as
-[statediffing geth](https://github.com/vulcanize/go-ethereum/releases/tag/v1.9.11-statediff-0.0.5) without needing to run a full node
+[statediffing geth](https://github.com/cerc-io/go-ethereum) without needing to run a full node.
+
+## Setup
+
+Configure access to the private Git server at `git.vdb.to`, then build the executable:
+
+```bash
+go build .
+```
 
 ## Configuration
 
+See [./environments/example.toml](./environments/example.toml) for an annotated example config file.
+
+### Local Setup
+
 * Create a chain config file `chain.json` according to chain config in genesis json file used by local geth.
 
 Example:
@@ -34,92 +42,171 @@ This service can serve historical state data over the same rpc interface as
 }
 ```
 
-* Sample database and chain configuration (from [environments/config.toml](./environments/config.toml)):
-
-  ```toml
-  [leveldb]
-  mode = "local"
-  # Path to geth LevelDB data
-  path = "/path-to-local-geth-data/chaindata"
-  ancient = "/path-to-local-geth-data/chaindata/ancient"
-
-  [ethereum]
-  chainConfig = "./chain.json" # Path to custom chain config file
-  chainID = 41337 # Same chain ID as in chain.json
-
-  [database]
-  # Update database config
-  name = "vulcanize_testing"
-  hostname = "localhost"
-  port = 5432
-  user = "postgres"
-  password = "postgres"
-  type = "postgres"
-  ```
-
-* Service and metrics configuration:
-
-  ```toml
-  [statediff]
-  serviceWorkers = 1 # Number of diffs to process concurrently
-  workerQueueSize = 1024 # Size of buffer for block range requests
-  trieWorkers = 4 # Number of state subtries to process concurrently
-
-  [cache]
-  database = 1024 # Trie node cache size in MB
-  trie = 1024 # LevelDB cache size in MiB
-
-  [prom]
-  dbStats = false
-  metrics = true
-  http = true
-  httpAddr = "localhost"
-  httpPort = "8889"
-  ```
-
-* To use a remote LevelDB RPC endpoint change the following in [config file](./environments/config.toml)
-
-  ```toml
-  [leveldb]
-  mode = "remote"
-  url = "http://127.0.0.1:8082/" # Remote LevelDB RPC url
-  ```
-
-* When using the `run` command to write diffs for specific block ranges, add this:
-
-  ```toml
-  [run]
-  only = false
-  ranges = [
-    [8, 15] # Block number range for which to write statediff.
-  ]
-  ```
+  Provide the path to the above file in the config.
 
 ## Usage
 
+* Create / update the config file (refer to example config above).
+
 ### `serve`
 
-To serve state diffs over RPC:
+* To serve the statediff RPC API:
 
-`eth-statediff-service serve --config=<config path>`
+  ```bash
+  ./eth-statediff-service serve --config=<config path>
+  ```
 
 Example:
 
 ```bash
 ./eth-statediff-service serve --config environments/config.toml
 ```
 
-Available RPC methods are:
+* Available RPC methods:
+  * `statediff_stateTrieAt()`
+  * `statediff_streamCodeAndCodeHash()`
   * `statediff_stateDiffAt()`
   * `statediff_writeStateDiffAt()`
   * `statediff_writeStateDiffsInRange()`
 
-e.g. `curl -X POST -H 'Content-Type: application/json' --data '{"jsonrpc":"2.0","method":"statediff_writeStateDiffsInRange","params":['"$BEGIN"', '"$END"', {"includeBlock":true,"includeReceipts":true,"includeTD":true,"includeCode":true}],"id":1}' "$HOST":"$PORT"`
-
-### `run`
-
-Produces diffs for specific block ranges.
-
-Example:
-
-```bash
-./eth-statediff-service run --config environments/config.toml --run.ranges '[8,15]'
-```
+  Example:
+
+  ```bash
+  curl -X POST -H 'Content-Type: application/json' --data '{
+    "jsonrpc": "2.0",
+    "method": "statediff_writeStateDiffsInRange",
+    "params": [0, 1, {
+        "includeBlock": true,
+        "includeReceipts": true,
+        "includeTD": true,
+        "includeCode": true
+      }
+    ],
+    "id": 1
+  }' "$HOST":"$PORT"
+  ```
+
+* Prerun:
+  * The process can be configured locally with sets of ranges to process as a "prerun" to
+    processing directed by the server endpoints.
+  * This is done by turning "prerun" on in the config (`statediff.prerun = true`) and defining
+    ranges and params in the `prerun` section of the config.
+  * Set the range using `prerun.start` and `prerun.stop`. Use `prerun.ranges` if prerun on more
+    than one range is required.
+  * NOTE: Currently, `params.includeTD` must be set to / passed as `true`.
 
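For reference, a prerun configuration along the lines described above might look like the following sketch; it is illustrative only, based on the `statediff.prerun` and `prerun.*` viper keys bound in cmd/env.go and cmd/root.go later in this compare, with placeholder block heights:

```toml
[statediff]
prerun = true

[prerun]
only = false
start = 8
stop = 15
# or, for multiple ranges:
# ranges = [ [8, 15] ]

[prerun.params]
includeBlock    = true
includeReceipts = true
includeTD       = true   # currently required to be true
includeCode     = true
```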
+## Monitoring
+
+* Enable metrics using config parameters `prom.metrics` and `prom.http`.
+* `eth-statediff-service` exposes following prometheus metrics at `/metrics` endpoint:
+  * `ranges_queued`: Number of range requests currently queued.
+  * `loaded_height`: The last block that was loaded for processing.
+  * `processed_height`: The last block that was processed.
+  * `stats.t_block_load`: Block loading time.
+  * `stats.t_block_processing`: Block (header, uncles, txs, rcts, tx trie, rct trie) processing time.
+  * `stats.t_state_processing`: State (state trie, storage tries, and code) processing time.
+  * `stats.t_postgres_tx_commit`: Postgres tx commit time.
+  * `http.count`: HTTP request count.
+  * `http.duration`: HTTP request duration.
+  * `ipc.count`: Unix socket connection count.
+
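With metrics enabled, a quick scrape of the endpoint could look roughly like this; the address and port are whatever `prom.httpAddr`/`prom.httpPort` are set to (the sample config removed above used localhost:8889), and the exported metric names may carry a namespace prefix:

```bash
# Assumes prom.metrics and prom.http are enabled and prom.httpPort = "8889"
curl -s http://localhost:8889/metrics | grep -E 'ranges_queued|loaded_height|processed_height'
```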
+## Tests
+
+* Run unit tests:
+
+  ```bash
+  make test
+  ```
+
+## Import output data in file mode into a database
+
+* When `eth-statediff-service` is run in file mode (`database.type`: `file`) the output is in form of a SQL
+  file or multiple CSV files.
+
+### SQL
+
+* Assuming the output files are located in host's `./output_dir` directory.
+
+* Create a directory to store post-processed output:
+
+  ```bash
+  mkdir -p output_dir/processed_output
+  ```
+
+* (Optional) Get row counts in the output:
+
+  ```bash
+  wc -l output_dir/statediff.sql > output_stats.txt
+  ```
+
+* De-duplicate data:
+
+  ```bash
+  sort -u output_dir/statediff.sql -o output_dir/processed_output/deduped-statediff.sql
+  ```
+
+* Copy over the post-processed output files to the DB server (say in `/output_dir`).
+
+* Run the following to import data:
+
+  ```bash
+  psql -U <DATABASE_USER> -h <DATABASE_HOSTNAME> -p <DATABASE_PORT> <DATABASE_NAME> --set ON_ERROR_STOP=on -f /output_dir/processed_output/deduped-statediff.sql
+  ```
+
+### CSV
+
+* Create an env file with the required variables. Refer [.sample.env](./scripts/.sample.env).
+
+* (Optional) Get row counts in the output:
+
+  ```bash
+  ./scripts/count-lines.sh <ENV_FILE_PATH>
+  ```
+
+* De-duplicate data:
+
+  ```bash
+  ./scripts/dedup.sh <ENV_FILE_PATH>
+  ```
+
+* Perform column checks:
+
+  ```bash
+  ./scripts/check-columns.sh <ENV_FILE_PATH>
+  ```
+
+  Check the output logs for any rows detected with unexpected number of columns.
+
+  Example:
+
+  ```bash
+  # log
+  eth.header_cids
+  Start: Wednesday 21 September 2022 06:00:38 PM IST
+  Time taken: 00:00:05
+  End: Wednesday 21 September 2022 06:00:43 PM IST
+  Total bad rows: 1 ./check-columns/eth.header_cids.txt
+
+  # bad row output
+  # line number, num. of columns, data
+  23 17 22,xxxxxx,0x07f5ea5c94aa8dea60b28f6b6315d92f2b6d78ca4b74ea409adeb191b5a114f2,0x5918487321aa57dd0c50977856c6231e7c4ee79e95b694c7c8830227d77a1ecc,bagiacgzaa726uxeuvkg6uyfsr5vwgfozf4vw26gkjn2ouqe232yzdnnbctza,45,geth,0,0xad8fa8df61b98dbda7acd6ca76d5ce4cbba663d5f608cc940957adcdb94cee8d,0xc621412320a20b4aaff5363bdf063b9d13e394ef82e55689ab703aae5db08e26,0x71ec1c7d81269ce115be81c81f13e1cc2601c292a7f20440a77257ecfdc69940,0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347,\x2000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000,1658408419,/blocks/DMQAP5PKLSKKVDPKMCZI623DCXMS6K3NPDFEW5HKICNN5MMRWWQRJ4Q,1,0x0000000000000000000000000000000000000000
+  ```
+
+* Import data using `timescaledb-parallel-copy`:
+  (requires [`timescaledb-parallel-copy`](https://github.com/timescale/timescaledb-parallel-copy) installation; readily comes with TimescaleDB docker image)
+
+  ```bash
+  ./scripts/timescaledb-import.sh <ENV_FILE_PATH>
+  ```
+
+* NOTE: `COPY` command on CSVs inserts empty strings as `NULL` in the DB. Passing `FORCE_NOT_NULL <COLUMN_NAME>` forces it to insert empty strings instead. This is required to maintain compatibility of the imported statediff data with the data generated in `postgres` mode. Reference: https://www.postgresql.org/docs/14/sql-copy.html
+
+### Stats
+
+The binary includes a `stats` command which reports stats for the offline or remote levelDB.
+
+At this time, the only stat supported is to return the latest/highest block height and hash found in the levelDB; this is
+useful for determining what the upper limit is for a standalone statediffing process on a given levelDB.
+
+`./eth-statediff-service stats --config={path to toml config file}`
File: cmd/env.go (modified)

@@ -30,11 +30,12 @@ const (
 
     DB_CACHE_SIZE_MB   = "DB_CACHE_SIZE_MB"
     TRIE_CACHE_SIZE_MB = "TRIE_CACHE_SIZE_MB"
-    LVLDB_MODE    = "LVLDB_MODE"
-    LVLDB_PATH    = "LVLDB_PATH"
-    LVLDB_ANCIENT = "LVLDB_ANCIENT"
-    LVLDB_URL     = "LVLDB_URL"
+    LEVELDB_MODE    = "LEVELDB_MODE"
+    LEVELDB_PATH    = "LEVELDB_PATH"
+    LEVELDB_ANCIENT = "LEVELDB_ANCIENT"
+    LEVELDB_URL     = "LEVELDB_URL"
 
+    STATEDIFF_PRERUN            = "STATEDIFF_PRERUN"
     STATEDIFF_TRIE_WORKERS      = "STATEDIFF_TRIE_WORKERS"
     STATEDIFF_SERVICE_WORKERS   = "STATEDIFF_SERVICE_WORKERS"
     STATEDIFF_WORKER_QUEUE_SIZE = "STATEDIFF_WORKER_QUEUE_SIZE"
@@ -48,10 +49,14 @@ const (
     PROM_HTTP_PORT = "PROM_HTTP_PORT"
     PROM_DB_STATS  = "PROM_DB_STATS"
 
-    RUN_INCLUDE_BLOCK    = "RUN_INCLUDE_BLOCK"
-    RUN_INCLUDE_RECEIPTS = "RUN_INCLUDE_RECEIPTS"
-    RUN_INCLUDE_TD       = "RUN_INCLUDE_TD"
-    RUN_INCLUDE_CODE     = "RUN_INCLUDE_CODE"
+    PRERUN_ONLY             = "PRERUN_ONLY"
+    PRERUN_PARALLEL         = "PRERUN_PARALLEL"
+    PRERUN_RANGE_START      = "PRERUN_RANGE_START"
+    PRERUN_RANGE_STOP       = "PRERUN_RANGE_STOP"
+    PRERUN_INCLUDE_BLOCK    = "PRERUN_INCLUDE_BLOCK"
+    PRERUN_INCLUDE_RECEIPTS = "PRERUN_INCLUDE_RECEIPTS"
+    PRERUN_INCLUDE_TD       = "PRERUN_INCLUDE_TD"
+    PRERUN_INCLUDE_CODE     = "PRERUN_INCLUDE_CODE"
 
     LOG_LEVEL     = "LOG_LEVEL"
     LOG_FILE_PATH = "LOG_FILE_PATH"
@@ -66,6 +71,8 @@ const (
     DATABASE_DRIVER_TYPE = "DATABASE_DRIVER_TYPE"
     DATABASE_DUMP_DST    = "DATABASE_DUMP_DST"
     DATABASE_FILE_PATH   = "DATABASE_FILE_PATH"
+    DATABASE_FILE_MODE    = "DATABASE_FILE_MODE"
+    DATABASE_FILE_CSV_DIR = "DATABASE_FILE_CSV_DIR"
 
     DATABASE_MAX_IDLE_CONNECTIONS = "DATABASE_MAX_IDLE_CONNECTIONS"
     DATABASE_MAX_OPEN_CONNECTIONS = "DATABASE_MAX_OPEN_CONNECTIONS"
@@ -73,6 +80,8 @@ const (
     DATABASE_MAX_CONN_LIFETIME  = "DATABASE_MAX_CONN_LIFETIME"
     DATABASE_CONN_TIMEOUT       = "DATABSE_CONN_TIMEOUT"
     DATABASE_MAX_CONN_IDLE_TIME = "DATABASE_MAX_CONN_IDLE_TIME"
+
+    DEBUG_PPROF = "DEBUG_PPROF"
 )
 
 // Bind env vars for eth node and DB configuration
@@ -103,15 +112,17 @@ func init() {
     viper.BindEnv("database.type", DATABASE_TYPE)
     viper.BindEnv("database.driver", DATABASE_DRIVER_TYPE)
     viper.BindEnv("database.dumpDestination", DATABASE_DUMP_DST)
+    viper.BindEnv("database.fileMode", DATABASE_FILE_MODE)
     viper.BindEnv("database.filePath", DATABASE_FILE_PATH)
+    viper.BindEnv("database.fileCsvDir", DATABASE_FILE_CSV_DIR)
 
     viper.BindEnv("cache.database", DB_CACHE_SIZE_MB)
     viper.BindEnv("cache.trie", TRIE_CACHE_SIZE_MB)
 
-    viper.BindEnv("leveldb.mode", LVLDB_MODE)
-    viper.BindEnv("leveldb.path", LVLDB_PATH)
-    viper.BindEnv("leveldb.ancient", LVLDB_ANCIENT)
-    viper.BindEnv("leveldb.url", LVLDB_URL)
+    viper.BindEnv("leveldb.mode", LEVELDB_MODE)
+    viper.BindEnv("leveldb.path", LEVELDB_PATH)
+    viper.BindEnv("leveldb.ancient", LEVELDB_ANCIENT)
+    viper.BindEnv("leveldb.url", LEVELDB_URL)
 
     viper.BindEnv("prom.metrics", PROM_METRICS)
     viper.BindEnv("prom.http", PROM_HTTP)
@@ -123,11 +134,18 @@ func init() {
     viper.BindEnv("statediff.trieWorkers", STATEDIFF_TRIE_WORKERS)
     viper.BindEnv("statediff.workerQueueSize", STATEDIFF_WORKER_QUEUE_SIZE)
 
-    viper.BindEnv("run.params.includeBlock", RUN_INCLUDE_BLOCK)
-    viper.BindEnv("run.params.includeReceipts", RUN_INCLUDE_RECEIPTS)
-    viper.BindEnv("run.params.includeTD", RUN_INCLUDE_TD)
-    viper.BindEnv("run.params.includeCode", RUN_INCLUDE_CODE)
+    viper.BindEnv("statediff.prerun", STATEDIFF_PRERUN)
+    viper.BindEnv("prerun.only", PRERUN_ONLY)
+    viper.BindEnv("prerun.parallel", PRERUN_PARALLEL)
+    viper.BindEnv("prerun.start", PRERUN_RANGE_START)
+    viper.BindEnv("prerun.stop", PRERUN_RANGE_STOP)
+    viper.BindEnv("prerun.params.includeBlock", PRERUN_INCLUDE_BLOCK)
+    viper.BindEnv("prerun.params.includeReceipts", PRERUN_INCLUDE_RECEIPTS)
+    viper.BindEnv("prerun.params.includeTD", PRERUN_INCLUDE_TD)
+    viper.BindEnv("prerun.params.includeCode", PRERUN_INCLUDE_CODE)
 
     viper.BindEnv("log.level", LOG_LEVEL)
     viper.BindEnv("log.file", LOG_FILE_PATH)
+
+    viper.BindEnv("debug.pprof", DEBUG_PPROF)
 }
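With the LVLDB_* variables renamed to LEVELDB_* and the new prerun variables added, a local environment could be set up roughly as below; the values are illustrative placeholders taken from the sample config and CI workflow shown earlier:

```bash
export LEVELDB_MODE=local
export LEVELDB_PATH=/path-to-local-geth-data/chaindata
export LEVELDB_ANCIENT=/path-to-local-geth-data/chaindata/ancient
export DATABASE_TYPE=postgres
export STATEDIFF_PRERUN=true
export PRERUN_RANGE_START=8
export PRERUN_RANGE_STOP=15
```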
File: cmd/root.go (modified)

@@ -34,7 +34,7 @@ import (
     "github.com/spf13/cobra"
     "github.com/spf13/viper"
 
-    "github.com/vulcanize/eth-statediff-service/pkg/prom"
+    "github.com/cerc-io/eth-statediff-service/pkg/prom"
 )
 
 var (
@@ -45,7 +45,7 @@ var (
 
 var rootCmd = &cobra.Command{
     Use:              "eth-statediff-service",
-    PersistentPreRun: setupLoggingAndMetrics,
+    PersistentPreRun: initFuncs,
 }
 
 func Execute() {
@@ -54,7 +54,7 @@ func Execute() {
     }
 }
 
-func setupLoggingAndMetrics(cmd *cobra.Command, args []string) {
+func initFuncs(cmd *cobra.Command, args []string) {
     logfile := viper.GetString("log.file")
     if logfile != "" {
         file, err := os.OpenFile(logfile,
@@ -120,11 +120,12 @@ func init() {
     rootCmd.PersistentFlags().String("ancient-path", "", "path to ancient datastore")
     rootCmd.PersistentFlags().String("leveldb-url", "", "url to primary leveldb-ethdb-rpc server")
 
+    rootCmd.PersistentFlags().Bool("prerun", false, "turn on prerun of toml configured ranges")
     rootCmd.PersistentFlags().Int("service-workers", 0, "number of range requests to process concurrently")
     rootCmd.PersistentFlags().Int("trie-workers", 0, "number of workers to use for trie traversal and processing")
     rootCmd.PersistentFlags().Int("worker-queue-size", 0, "size of the range request queue for service workers")
 
-    rootCmd.PersistentFlags().String("database-name", "vulcanize_public", "database name")
+    rootCmd.PersistentFlags().String("database-name", "cerc_public", "database name")
     rootCmd.PersistentFlags().Int("database-port", 5432, "database port")
     rootCmd.PersistentFlags().String("database-hostname", "localhost", "database hostname")
     rootCmd.PersistentFlags().String("database-user", "", "database user")
@@ -138,7 +139,9 @@ func init() {
     rootCmd.PersistentFlags().String("database-type", "postgres", "database type (currently supported: postgres, dump)")
     rootCmd.PersistentFlags().String("database-driver", "sqlx", "database driver type (currently supported: sqlx, pgx)")
     rootCmd.PersistentFlags().String("database-dump-dst", "stdout", "dump destination (for database-type=dump; options: stdout, stderr, discard)")
-    rootCmd.PersistentFlags().String("database-file-path", "", "full file path (for database-type=file)")
+    rootCmd.PersistentFlags().String("database-file-mode", "csv", "mode for writing file (for database-type=file; options: csv, sql)")
+    rootCmd.PersistentFlags().String("database-file-csv-dir", "", "full directory path (for database-file-mode=csv)")
+    rootCmd.PersistentFlags().String("database-file-path", "", "full file path (for database-file-mode=sql)")
 
     rootCmd.PersistentFlags().String("eth-node-id", "", "eth node id")
     rootCmd.PersistentFlags().String("eth-client-name", "eth-statediff-service", "eth client name")
@@ -156,10 +159,13 @@ func init() {
     rootCmd.PersistentFlags().Bool("prom-db-stats", false, "enables prometheus db stats")
    rootCmd.PersistentFlags().Bool("prom-metrics", false, "enable prometheus metrics")
 
-    rootCmd.PersistentFlags().Bool("run-include-block", true, "include block data in the statediff payload")
-    rootCmd.PersistentFlags().Bool("run-include-receipts", true, "include receipts in the statediff payload")
-    rootCmd.PersistentFlags().Bool("run-include-td", true, "include td in the statediff payload")
-    rootCmd.PersistentFlags().Bool("run-include-code", true, "include code and codehash mappings in statediff payload")
+    rootCmd.PersistentFlags().Bool("prerun-only", false, "only process pre-configured ranges; exit afterwards")
+    rootCmd.PersistentFlags().Int("prerun-start", 0, "start height for a prerun range")
+    rootCmd.PersistentFlags().Int("prerun-stop", 0, "stop height for a prerun range")
+    rootCmd.PersistentFlags().Bool("prerun-include-block", true, "include block data in the statediff payload")
+    rootCmd.PersistentFlags().Bool("prerun-include-receipts", true, "include receipts in the statediff payload")
+    rootCmd.PersistentFlags().Bool("prerun-include-td", true, "include td in the statediff payload")
+    rootCmd.PersistentFlags().Bool("prerun-include-code", true, "include code and codehash mappings in statediff payload")
 
     viper.BindPFlag("server.httpPath", rootCmd.PersistentFlags().Lookup("http-path"))
     viper.BindPFlag("server.ipcPath", rootCmd.PersistentFlags().Lookup("ipc-path"))
@@ -167,6 +173,7 @@ func init() {
     viper.BindPFlag("log.file", rootCmd.PersistentFlags().Lookup("log-file"))
     viper.BindPFlag("log.level", rootCmd.PersistentFlags().Lookup("log-level"))
 
+    viper.BindPFlag("statediff.prerun", rootCmd.PersistentFlags().Lookup("prerun"))
     viper.BindPFlag("statediff.serviceWorkers", rootCmd.PersistentFlags().Lookup("service-workers"))
     viper.BindPFlag("statediff.trieWorkers", rootCmd.PersistentFlags().Lookup("trie-workers"))
     viper.BindPFlag("statediff.workerQueueSize", rootCmd.PersistentFlags().Lookup("worker-queue-size"))
@@ -190,6 +197,8 @@ func init() {
     viper.BindPFlag("database.type", rootCmd.PersistentFlags().Lookup("database-type"))
     viper.BindPFlag("database.driver", rootCmd.PersistentFlags().Lookup("database-driver"))
     viper.BindPFlag("database.dumpDestination", rootCmd.PersistentFlags().Lookup("database-dump-dst"))
+    viper.BindPFlag("database.fileMode", rootCmd.PersistentFlags().Lookup("database-file-mode"))
+    viper.BindPFlag("database.fileCsvDir", rootCmd.PersistentFlags().Lookup("database-file-csv-dir"))
     viper.BindPFlag("database.filePath", rootCmd.PersistentFlags().Lookup("database-file-path"))
 
     viper.BindPFlag("ethereum.nodeID", rootCmd.PersistentFlags().Lookup("eth-node-id"))
@@ -208,10 +217,16 @@ func init() {
     viper.BindPFlag("prom.dbStats", rootCmd.PersistentFlags().Lookup("prom-db-stats"))
     viper.BindPFlag("prom.metrics", rootCmd.PersistentFlags().Lookup("prom-metrics"))
 
-    viper.BindPFlag("run.params.includeBlock", rootCmd.PersistentFlags().Lookup("run-include-block"))
-    viper.BindPFlag("run.params.includeReceipts", rootCmd.PersistentFlags().Lookup("run-include-receipts"))
-    viper.BindPFlag("run.params.includeTD", rootCmd.PersistentFlags().Lookup("run-include-td"))
-    viper.BindPFlag("run.params.includeCode", rootCmd.PersistentFlags().Lookup("run-include-code"))
+    viper.BindPFlag("prerun.only", rootCmd.PersistentFlags().Lookup("prerun-only"))
+    viper.BindPFlag("prerun.parallel", rootCmd.PersistentFlags().Lookup("prerun-parallel"))
+    viper.BindPFlag("prerun.start", rootCmd.PersistentFlags().Lookup("prerun-start"))
+    viper.BindPFlag("prerun.stop", rootCmd.PersistentFlags().Lookup("prerun-stop"))
+    viper.BindPFlag("prerun.params.includeBlock", rootCmd.PersistentFlags().Lookup("prerun-include-block"))
+    viper.BindPFlag("prerun.params.includeReceipts", rootCmd.PersistentFlags().Lookup("prerun-include-receipts"))
+    viper.BindPFlag("prerun.params.includeTD", rootCmd.PersistentFlags().Lookup("prerun-include-td"))
+    viper.BindPFlag("prerun.params.includeCode", rootCmd.PersistentFlags().Lookup("prerun-include-code"))
+
+    viper.BindPFlag("debug.pprof", rootCmd.PersistentFlags().Lookup("debug-pprof"))
 
     rand.Seed(time.Now().UnixNano())
 }
@@ -284,18 +299,35 @@ func getConfig(nodeInfo node.Info) (interfaces.Config, error) {
     if err != nil {
         return nil, err
     }
-    logWithCommand.Infof("configuring service for database type: %s", dbType)
+    logWithCommand.Debugf("Configuring service for database type: %s", dbType)
     var indexerConfig interfaces.Config
     switch dbType {
     case shared.FILE:
-        logWithCommand.Info("starting in sql file writing mode")
-        filePathStr := viper.GetString("database.filePath")
-        if filePathStr == "" {
-            logWithCommand.Fatal("when operating in sql file writing mode a file path must be provided")
+        logWithCommand.Info("Starting in sql file writing mode")
+
+        fileModeStr := viper.GetString("database.fileMode")
+        fileMode, err := file.ResolveFileMode(fileModeStr)
+        if err != nil {
+            utils.Fatalf("%v", err)
+        }
+
+        filePathStr := viper.GetString("database.filePath")
+        if fileMode == file.SQL && filePathStr == "" {
+            logWithCommand.Fatal("When operating in sql file writing mode a file path must be provided")
+        }
+
+        fileCsvDirStr := viper.GetString("database.fileCsvDir")
+        if fileMode == file.CSV && fileCsvDirStr == "" {
+            logWithCommand.Fatal("When operating in csv file writing mode a directory path must be provided")
+        }
+
+        indexerConfig = file.Config{
+            Mode:      fileMode,
+            OutputDir: fileCsvDirStr,
+            FilePath:  filePathStr,
         }
-        indexerConfig = file.Config{FilePath: filePathStr}
     case shared.DUMP:
-        logWithCommand.Info("starting in data dump mode")
+        logWithCommand.Info("Starting in data dump mode")
         dumpDstStr := viper.GetString("database.dumpDestination")
         dumpDst, err := dump.ResolveDumpType(dumpDstStr)
         if err != nil {
@@ -312,7 +344,7 @@ func getConfig(nodeInfo node.Info) (interfaces.Config, error) {
             return nil, fmt.Errorf("unrecognized dump destination: %s", dumpDst)
         }
     case shared.POSTGRES:
-        logWithCommand.Info("starting in postgres mode")
+        logWithCommand.Info("Starting in postgres mode")
         driverTypeStr := viper.GetString("database.driver")
         driverType, err := postgres.ResolveDriverType(driverTypeStr)
         if err != nil {
@@ -324,8 +356,6 @@ func getConfig(nodeInfo node.Info) (interfaces.Config, error) {
             DatabaseName: viper.GetString("database.name"),
             Username:     viper.GetString("database.user"),
             Password:     viper.GetString("database.password"),
-            ID:           nodeInfo.ID,
-            ClientName:   nodeInfo.ClientName,
             Driver:       driverType,
         }
         if viper.IsSet("database.maxIdle") {
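Since the new prerun-* options are persistent flags, they can be combined with `serve`; an invocation along these lines is a plausible sketch (illustrative only, reusing the 8-15 range from the removed README example):

```bash
./eth-statediff-service serve --config environments/config.toml \
  --prerun --prerun-start 8 --prerun-stop 15 --prerun-only
```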
File: cmd/run.go (deleted, 91 lines)

// Copyright © 2023 Vulcanize, Inc
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.

package cmd

import (
    "os"
    "os/signal"
    "sync"

    statediff "github.com/cerc-io/plugeth-statediff"
    "github.com/ethereum/go-ethereum/common"
    "github.com/sirupsen/logrus"
    "github.com/spf13/cobra"
    "github.com/spf13/viper"

    pkg "github.com/vulcanize/eth-statediff-service/pkg"
)

// serveCmd represents the serve command
var runCmd = &cobra.Command{
    Use:   "run",
    Short: "Produce diffs for a specific block range",
    Long: `Usage

./eth-statediff-service run --config={path to toml config file}`,
    Run: func(cmd *cobra.Command, args []string) {
        subCommand = cmd.CalledAs()
        logWithCommand = *logrus.WithField("SubCommand", subCommand)
        runRanges()
    },
}

func init() {
    rootCmd.AddCommand(runCmd)
}

func runRanges() {
    service := createStateDiffService()
    // start service and servers
    var wg sync.WaitGroup
    ranges := getConfiguredRanges()
    service.Run(ranges)

    // clean shutdown
    shutdown := make(chan os.Signal)
    signal.Notify(shutdown, os.Interrupt)
    <-shutdown
    logWithCommand.Info("Received interrupt signal, shutting down")
    service.Stop()
    wg.Wait()
}

func getConfiguredRanges() []pkg.RangeRequest {
    params := statediff.Params{
        IncludeBlock:    viper.GetBool("run.params.includeBlock"),
        IncludeReceipts: viper.GetBool("run.params.includeReceipts"),
        IncludeTD:       viper.GetBool("run.params.includeTD"),
        IncludeCode:     viper.GetBool("run.params.includeCode"),
    }
    var addrStrs []string
    viper.UnmarshalKey("run.params.watchedAddresses", &addrStrs)
    addrs := make([]common.Address, len(addrStrs))
    for i, addrStr := range addrStrs {
        addrs[i] = common.HexToAddress(addrStr)
    }
    params.WatchedAddresses = addrs
    var rawRanges []blockRange
    viper.UnmarshalKey("run.ranges", &rawRanges)
    blockRanges := make([]pkg.RangeRequest, len(rawRanges))
    for i, rawRange := range rawRanges {
        blockRanges[i] = pkg.RangeRequest{
            Start:  rawRange[0],
            Stop:   rawRange[1],
            Params: params,
        }
    }
    return blockRanges
}
File: cmd/serve.go (modified)

@@ -16,8 +16,11 @@
 package cmd
 
 import (
+    "net/http"
+    _ "net/http/pprof"
     "os"
     "os/signal"
+    "runtime"
     "sync"
 
     "github.com/ethereum/go-ethereum/rpc"
@@ -25,8 +28,8 @@ import (
     "github.com/spf13/cobra"
     "github.com/spf13/viper"
 
-    sd "github.com/vulcanize/eth-statediff-service/pkg"
-    srpc "github.com/vulcanize/eth-statediff-service/pkg/rpc"
+    pkg "github.com/cerc-io/eth-statediff-service/pkg"
+    srpc "github.com/cerc-io/eth-statediff-service/pkg/rpc"
 )
 
 // serveCmd represents the serve command
@@ -47,22 +50,57 @@ func init() {
     rootCmd.AddCommand(serveCmd)
 }
 
-func serve() {
-    logWithCommand.Info("Running eth-statediff-service serve command")
+func maxParallelism() int {
+    maxProcs := runtime.GOMAXPROCS(0)
+    numCPU := runtime.NumCPU()
+    if maxProcs < numCPU {
+        return maxProcs
+    }
+    return numCPU
+}
 
-    service := createStateDiffService()
+func serve() {
+    logWithCommand.Debug("Running eth-statediff-service serve command")
+    logWithCommand.Debugf("Parallelism: %d", maxParallelism())
+
+    reader, chainConf, nodeInfo := instantiateLevelDBReader()
+
+    reportLatestBlock(reader)
+
+    service, err := createStateDiffService(reader, chainConf, nodeInfo)
+    if err != nil {
+        logWithCommand.Fatal(err)
+    }
+
+    // Enable the pprof agent if configured
+    if viper.GetBool("debug.pprof") {
+        // See: https://www.farsightsecurity.com/blog/txt-record/go-remote-profiling-20161028/
+        // For security reasons: do not use the default http multiplexor elsewhere in this process.
+        go func() {
+            logWithCommand.Info("Starting pprof listener on port 6060")
+            logWithCommand.Fatal(http.ListenAndServe("localhost:6060", nil))
+        }()
+    }
+
+    // short circuit if we only want to perform prerun
+    if viper.GetBool("prerun.only") {
+        parallel := viper.GetBool("prerun.parallel")
+        if err := service.Run(nil, parallel); err != nil {
+            logWithCommand.Fatalf("Unable to perform prerun: %v", err)
+        }
+        return
+    }
 
     // start service and servers
-    logWithCommand.Info("Starting statediff service")
     var wg sync.WaitGroup
     if err := service.Loop(&wg); err != nil {
         logWithCommand.Fatalf("unable to start statediff service: %v", err)
     }
-    logWithCommand.Info("Starting RPC servers")
     if err := startServers(service); err != nil {
         logWithCommand.Fatal(err)
     }
-    logWithCommand.Info("RPC servers successfully spun up; awaiting requests")
+    logWithCommand.Debug("RPC servers successfully spun up; awaiting requests")
 
     // clean shutdown
     shutdown := make(chan os.Signal)
@@ -73,21 +111,19 @@ func serve() {
     wg.Wait()
 }
 
-func startServers(serv *sd.Service) error {
+func startServers(serv *pkg.Service) error {
     ipcPath := viper.GetString("server.ipcPath")
     httpPath := viper.GetString("server.httpPath")
     if ipcPath == "" && httpPath == "" {
-        logWithCommand.Fatal("need an ipc path and/or an http path")
+        logWithCommand.Fatal("Need an IPC path and/or an HTTP path")
     }
     if ipcPath != "" {
-        logWithCommand.Info("starting up IPC server")
        _, _, err := srpc.StartIPCEndpoint(ipcPath, serv.APIs())
         if err != nil {
             return err
         }
     }
     if httpPath != "" {
-        logWithCommand.Info("starting up HTTP server")
        _, err := srpc.StartHTTPEndpoint(httpPath, serv.APIs(), []string{"statediff"}, nil, []string{"*"}, rpc.HTTPTimeouts{})
         if err != nil {
             return err
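With `debug.pprof` enabled, the standard net/http/pprof endpoints become available on the hard-coded localhost:6060 listener, so profiles can be pulled roughly like this (an illustrative sketch, not part of the diff):

```bash
# 30-second CPU profile from the embedded pprof server
go tool pprof http://localhost:6060/debug/pprof/profile?seconds=30

# Heap snapshot
go tool pprof http://localhost:6060/debug/pprof/heap
```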
File: cmd/stats.go (new file, 46 lines)

// Copyright © 2022 Vulcanize, Inc
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.

package cmd

import (
    "github.com/sirupsen/logrus"
    "github.com/spf13/cobra"
)

// statsCmd represents the serve command
var statsCmd = &cobra.Command{
    Use:   "stats",
    Short: "Report stats for cold levelDB",
    Long: `Usage

./eth-statediff-service stats --config={path to toml config file}`,
    Run: func(cmd *cobra.Command, args []string) {
        subCommand = cmd.CalledAs()
        logWithCommand = *logrus.WithField("SubCommand", subCommand)
        stats()
    },
}

func init() {
    rootCmd.AddCommand(statsCmd)
}

func stats() {
    logWithCommand.Info("Running eth-statediff-service stats command")

    reader, _, _ := instantiateLevelDBReader()
    reportLatestBlock(reader)
}
163  cmd/util.go
@ -6,20 +6,114 @@ import (
    "fmt"
    "os"

-   ind "github.com/cerc-io/plugeth-statediff/indexer"
+   statediff "github.com/cerc-io/plugeth-statediff"
+   "github.com/cerc-io/plugeth-statediff/indexer"
+   "github.com/cerc-io/plugeth-statediff/indexer/node"
+   "github.com/cerc-io/plugeth-statediff/indexer/shared"
    "github.com/cerc-io/plugeth-statediff/utils/log"
+   "github.com/ethereum/go-ethereum/common"
    "github.com/ethereum/go-ethereum/params"
    "github.com/ethereum/go-ethereum/trie"
    "github.com/spf13/viper"

-   sd "github.com/vulcanize/eth-statediff-service/pkg"
+   pkg "github.com/cerc-io/eth-statediff-service/pkg"
+   "github.com/cerc-io/eth-statediff-service/pkg/prom"
)

type blockRange [2]uint64

-func createStateDiffService() *sd.Service {
+func createStateDiffService(lvlDBReader pkg.Reader, chainConf *params.ChainConfig, nodeInfo node.Info) (*pkg.Service, error) {
+   // create statediff service
+   logWithCommand.Debug("Setting up database")
+   conf, err := getConfig(nodeInfo)
+   if err != nil {
+       logWithCommand.Fatal(err)
+   }
+
+   logWithCommand.Debug("Creating statediff indexer")
+   db, indexer, err := indexer.NewStateDiffIndexer(context.Background(), chainConf, nodeInfo, conf)
+   if err != nil {
+       logWithCommand.Fatal(err)
+   }
+   if conf.Type() == shared.POSTGRES && viper.GetBool("prom.dbStats") {
+       prom.RegisterDBCollector(viper.GetString("database.name"), db)
+   }
+
+   logWithCommand.Debug("Creating statediff service")
+   sdConf := pkg.ServiceConfig{
+       ServiceWorkers:  viper.GetUint("statediff.serviceWorkers"),
+       TrieWorkers:     viper.GetUint("statediff.trieWorkers"),
+       WorkerQueueSize: viper.GetUint("statediff.workerQueueSize"),
+       PreRuns:         setupPreRunRanges(),
+   }
+   return pkg.NewStateDiffService(lvlDBReader, indexer, sdConf), nil
+}
+
+func setupPreRunRanges() []pkg.RangeRequest {
+   if !viper.GetBool("statediff.prerun") {
+       return nil
+   }
+   preRunParams := statediff.Params{
+       IncludeBlock:    viper.GetBool("prerun.params.includeBlock"),
+       IncludeReceipts: viper.GetBool("prerun.params.includeReceipts"),
+       IncludeTD:       viper.GetBool("prerun.params.includeTD"),
+       IncludeCode:     viper.GetBool("prerun.params.includeCode"),
+   }
+   var addrStrs []string
+   viper.UnmarshalKey("prerun.params.watchedAddresses", &addrStrs)
+   addrs := make([]common.Address, len(addrStrs))
+   for i, addrStr := range addrStrs {
+       addrs[i] = common.HexToAddress(addrStr)
+   }
+   preRunParams.WatchedAddresses = addrs
+   var rawRanges []blockRange
+   viper.UnmarshalKey("prerun.ranges", &rawRanges)
+   blockRanges := make([]pkg.RangeRequest, len(rawRanges))
+   for i, rawRange := range rawRanges {
+       blockRanges[i] = pkg.RangeRequest{
+           Start:  rawRange[0],
+           Stop:   rawRange[1],
+           Params: preRunParams,
+       }
+   }
+   if viper.IsSet("prerun.start") && viper.IsSet("prerun.stop") {
+       hardStart := viper.GetInt("prerun.start")
+       hardStop := viper.GetInt("prerun.stop")
+       blockRanges = append(blockRanges, pkg.RangeRequest{
+           Start:  uint64(hardStart),
+           Stop:   uint64(hardStop),
+           Params: preRunParams,
+       })
+   }
+
+   return blockRanges
+}
+
+// LoadConfig loads chain config from json file
+func LoadConfig(chainConfigPath string) (*params.ChainConfig, error) {
+   file, err := os.Open(chainConfigPath)
+   if err != nil {
+       log.Error("Failed to read chain config file", "error", err)
+       return nil, err
+   }
+   defer file.Close()
+
+   chainConfig := new(params.ChainConfig)
+   if err := json.NewDecoder(file).Decode(chainConfig); err != nil {
+       log.Error("invalid chain config file", "error", err)
+       return nil, err
+   }
+
+   log.Debug(fmt.Sprintf("Using chain config from '%s'. Content: %+v", chainConfigPath, chainConfig))
+
+   return chainConfig, nil
+}
+
+func instantiateLevelDBReader() (pkg.Reader, *params.ChainConfig, node.Info) {
    // load some necessary params
-   logWithCommand.Info("Loading statediff service parameters")
+   logWithCommand.Debug("Loading statediff service parameters")
    mode := viper.GetString("leveldb.mode")
    path := viper.GetString("leveldb.path")
    ancientPath := viper.GetString("leveldb.ancient")
@ -39,17 +133,15 @@ func createStateDiffService() *sd.Service {
    nodeInfo := getEthNodeInfo()

-   var chainConf *params.ChainConfig
-   var err error
    chainConfigPath := viper.GetString("ethereum.chainConfig")
-   chainConf, err = LoadConfig(chainConfigPath)
+   chainConf, err := LoadConfig(chainConfigPath)
    if err != nil {
-       logWithCommand.Fatal(err)
+       logWithCommand.Fatalf("Unable to instantiate chain config: %s", err)
    }

    // create LevelDB reader
-   logWithCommand.Info("Creating LevelDB reader")
+   logWithCommand.Debug("Creating LevelDB reader")
-   readerConf := sd.LvLDBReaderConfig{
+   readerConf := pkg.LvLDBReaderConfig{
        TrieConfig: &trie.Config{
            Cache:   viper.GetInt("cache.trie"),
            Journal: "",
@ -62,49 +154,24 @@ func createStateDiffService() *sd.Service {
        Url:         url,
        DBCacheSize: viper.GetInt("cache.database"),
    }
-   lvlDBReader, err := sd.NewLvlDBReader(readerConf)
+   reader, err := pkg.NewLvlDBReader(readerConf)
    if err != nil {
-       logWithCommand.Fatal(err)
+       logWithCommand.Fatalf("Unable to instantiate levelDB reader: %s", err)
    }
+   return reader, chainConf, nodeInfo
-
-   // create statediff service
-   logWithCommand.Info("Setting up database")
-   conf, err := getConfig(nodeInfo)
-   if err != nil {
-       logWithCommand.Fatal(err)
-   }
-   logWithCommand.Info("Creating statediff indexer")
-   _, indexer, err := ind.NewStateDiffIndexer(context.Background(), chainConf, nodeInfo, conf)
-   if err != nil {
-       logWithCommand.Fatal(err)
-   }
-   logWithCommand.Info("Creating statediff service")
-   sdConf := sd.ServiceConfig{
-       ServiceWorkers:  viper.GetUint("statediff.serviceWorkers"),
-       TrieWorkers:     viper.GetUint("statediff.trieWorkers"),
-       WorkerQueueSize: viper.GetUint("statediff.workerQueueSize"),
-   }
-   return sd.NewStateDiffService(lvlDBReader, indexer, sdConf)
}

-// LoadConfig loads chain config from json file
-func LoadConfig(chainConfigPath string) (*params.ChainConfig, error) {
-   file, err := os.Open(chainConfigPath)
+// report latest block info
+func reportLatestBlock(reader pkg.Reader) {
+   header, err := reader.GetLatestHeader()
    if err != nil {
-       log.Error(fmt.Sprintf("Failed to read chain config file: %v", err))
-       return nil, err
+       logWithCommand.Fatalf("Unable to determine latest header height and hash: %s", err.Error())
    }
-   defer file.Close()
-
-   chainConfig := new(params.ChainConfig)
-   if err := json.NewDecoder(file).Decode(chainConfig); err != nil {
-       log.Error(fmt.Sprintf("invalid chain config file: %v", err))
-       return nil, err
-   }
-
-   log.Info(fmt.Sprintf("Using chain config from %s file. Content %+v", chainConfigPath, chainConfig))
-   return chainConfig, nil
+   if header.Number == nil {
+       logWithCommand.Fatal("Latest header found in levelDB has a nil block height")
+   }
+   logWithCommand.
+       WithField("height", header.Number).
+       WithField("hash", header.Hash()).
+       Info("Latest block found in levelDB")
}
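For orientation, the refactor above separates reader construction (instantiateLevelDBReader) from service construction (createStateDiffService). A minimal sketch of how the two helpers compose, based only on the signatures shown in this diff; the surrounding cobra command wiring is assumed:

    // Hypothetical caller, e.g. inside a serve/write command's Run function (assumed wiring).
    reader, chainConf, nodeInfo := instantiateLevelDBReader() // exits via Fatalf on any setup error
    service, err := createStateDiffService(reader, chainConf, nodeInfo)
    if err != nil {
        logWithCommand.Fatal(err)
    }
    // service (*pkg.Service) now carries the [statediff] worker settings and any prerun ranges.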
@ -20,7 +20,7 @@ import (
    log "github.com/sirupsen/logrus"
    "github.com/spf13/cobra"

-   v "github.com/vulcanize/eth-statediff-service/version"
+   v "github.com/cerc-io/eth-statediff-service/version"
)

// versionCmd represents the version command
@ -1,21 +0,0 @@ (entire file removed)
-services:
-  eth-statediff-service:
-    build:
-      context: ./
-      cache_from:
-       - alpine:latest
-       - golang:1.18
-      dockerfile: ./Dockerfile
-      args:
-        USER: "vdbm"
-        CONFIG_FILE: ./environments/docker.toml
-        EXPOSE_PORT: 8545
-    environment:
-      - VDB_COMMAND=serve
-    volumes:
-      - eth-statediff-service-data:/vdbm/.ethereum/
-    ports:
-     - "127.0.0.1:8545:8545"
-
-volumes:
-  eth-statediff-service-data:
@ -1,59 +1,104 @@
[leveldb]
-mode = "local"
-path = "/home/user/.ethereum/geth/chaindata"
-ancient = "/home/user/.ethereum/geth/chaindata/ancient"
-url = "http://127.0.0.1:8082/"
+# LevelDB access mode <local | remote>
+mode = "local" # LEVELDB_MODE
+
+# LevelDB paths (local mode)
+path = "/Users/user/Library/Ethereum/geth/chaindata" # LEVELDB_PATH
+ancient = "/Users/user/Library/Ethereum/geth/chaindata/ancient" # LEVELDB_ANCIENT
+
+# URL for leveldb-ethdb-rpc endpoint (remote mode)
+url = "http://127.0.0.1:8082/" # LEVELDB_URL

[server]
-ipcPath = ".ipc"
-httpPath = "127.0.0.1:8545"
+ipcPath = ".ipc" # SERVICE_IPC_PATH
+httpPath = "127.0.0.1:8545" # SERVICE_HTTP_PATH

[statediff]
-serviceWorkers = 1
-workerQueueSize = 1024
-trieWorkers = 4
+prerun = true # STATEDIFF_PRERUN
+serviceWorkers = 1 # STATEDIFF_SERVICE_WORKERS
+workerQueueSize = 1024 # STATEDIFF_WORKER_QUEUE_SIZE
+trieWorkers = 4 # STATEDIFF_TRIE_WORKERS

-[run]
+[prerun]
+only = false # PRERUN_ONLY
+parallel = true # PRERUN_PARALLEL
+
+# to perform prerun in a specific range (optional)
+start = 0 # PRERUN_RANGE_START
+stop = 100 # PRERUN_RANGE_STOP
+
+# to perform prerun over multiple ranges (optional)
ranges = [
-  [0, 1000]
+  [101, 1000]
]
-[run.params]
-includeBlock = true
-includeReceipts = true
-includeTD = true
-includeCode = true
+
+# statediffing params for prerun
+[prerun.params]
+intermediateStateNodes = true # PRERUN_INTERMEDIATE_STATE_NODES
+intermediateStorageNodes = true # PRERUN_INTERMEDIATE_STORAGE_NODES
+includeBlock = true # PRERUN_INCLUDE_BLOCK
+includeReceipts = true # PRERUN_INCLUDE_RECEIPTS
+includeTD = true # PRERUN_INCLUDE_TD
+includeCode = true # PRERUN_INCLUDE_CODE
watchedAddresses = []

[log]
-file = ""
-level = "info"
+# Leave empty to output to stdout
+file = "" # LOG_FILE_PATH
+level = "info" # LOG_LEVEL

[database]
-name = "cerc_testing"
-hostname = "localhost"
-port = 5432
-user = "vdbm"
-password = "password"
+# output type <postgres | file | dump>
type = "postgres"
-driver = "sqlx"
-dumpDestination = ""
-filePath = ""
+
+# with postgres type
+# db credentials
+name = "vulcanize_test" # DATABASE_NAME
+hostname = "localhost" # DATABASE_HOSTNAME
+port = 5432 # DATABASE_PORT
+user = "vulcanize" # DATABASE_USER
+password = "..." # DATABASE_PASSWORD
+# SQL backend to use: <sqlx | pgx>
+driver = "sqlx" # DATABASE_DRIVER_TYPE
+
+# with file type
+# file mode <sql | csv>
+fileMode = "csv" # DATABASE_FILE_MODE
+
+# with SQL file mode
+filePath = "" # DATABASE_FILE_PATH
+
+# with CSV file mode
+fileCsvDir = "output_dir" # DATABASE_FILE_CSV_DIR
+
+# with dump type
+# <stdout | stderr | discard>
+dumpDestination = "" # DATABASE_DUMP_DST

[cache]
-database = 1024
-trie = 1024
+# settings for geth internal caches
+database = 1024 # DB_CACHE_SIZE_MB
+trie = 1024 # TRIE_CACHE_SIZE_MB

[prom]
-dbStats = false
-metrics = true
-http = true
-httpAddr = "localhost"
-httpPort = "8889"
+# prometheus metrics
+metrics = true # PROM_METRICS
+http = true # PROM_HTTP
+httpAddr = "localhost" # PROM_HTTP_ADDR
+httpPort = "8889" # PROM_HTTP_PORT
+dbStats = true # PROM_DB_STATS

[ethereum]
-chainConfig = ""
-nodeID = ""
-clientName = "eth-statediff-service"
-genesisBlock = "0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3"
-networkID = 1
-chainID = 1
+# Identifiers for ethereum node
+nodeID = "" # ETH_NODE_ID
+clientName = "eth-statediff-service" # ETH_CLIENT_NAME
+networkID = 1 # ETH_NETWORK_ID
+chainID = 1 # ETH_CHAIN_ID
+genesisBlock = "0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3" # ETH_GENESIS_BLOCK
+
+# Path to custom chain config file (optional)
+# chainID should match that in this config file
+chainConfig = "chain.json" # ETH_CHAIN_CONFIG
+
+[debug]
+pprof = false # DEBUG_PPROF
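The keys above map one-to-one onto the viper lookups in cmd/util.go; the environment variable names in the trailing comments are the intended overrides (how they are bound, e.g. via viper.BindEnv, is not shown in this diff and is assumed). A small illustrative sketch of how the service consumes them, mirroring calls visible in cmd/util.go:

    mode := viper.GetString("leveldb.mode")            // "local" or "remote"
    path := viper.GetString("leveldb.path")            // used in local mode
    ancient := viper.GetString("leveldb.ancient")      // freezer path, local mode
    prerun := viper.GetBool("statediff.prerun")        // gates setupPreRunRanges()
    workers := viper.GetUint("statediff.trieWorkers")  // per-range trie concurrency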
6  fixture/.gitignore (vendored, new file)
@ -0,0 +1,6 @@
*/*.log
*/CURRENT*
*/LOCK
*/LOG
*/MANIFEST-*
*/ancient/FLOCK

BIN  fixture/chaindata/000002.ldb (new file; binary, not shown)
BIN  fixture/chaindata/000004.ldb (new file; binary, not shown)
0  fixture/chaindata/ancient/bodies.0000.cdat (new empty file)
BIN  fixture/chaindata/ancient/bodies.cidx (new file; binary, not shown)
1  fixture/chaindata/ancient/bodies.meta (new file; one line of binary data, not shown)
0  fixture/chaindata/ancient/diffs.0000.rdat (new empty file)
1  fixture/chaindata/ancient/diffs.meta (new file; one line of binary data, not shown)
BIN  fixture/chaindata/ancient/diffs.ridx (new file; binary, not shown)
0  fixture/chaindata/ancient/hashes.0000.rdat (new empty file)
1  fixture/chaindata/ancient/hashes.meta (new file; one line of binary data, not shown)
BIN  fixture/chaindata/ancient/hashes.ridx (new file; binary, not shown)
0  fixture/chaindata/ancient/headers.0000.cdat (new empty file)
BIN  fixture/chaindata/ancient/headers.cidx (new file; binary, not shown)
1  fixture/chaindata/ancient/headers.meta (new file; one line of binary data, not shown)
0  fixture/chaindata/ancient/receipts.0000.cdat (new empty file)
BIN  fixture/chaindata/ancient/receipts.cidx (new file; binary, not shown)
1  fixture/chaindata/ancient/receipts.meta (new file; one line of binary data, not shown)
14  go.mod
@ -1,16 +1,16 @@
-module github.com/vulcanize/eth-statediff-service
+module github.com/cerc-io/eth-statediff-service

go 1.19

require (
+   github.com/cerc-io/leveldb-ethdb-rpc v1.1.13
    github.com/cerc-io/plugeth-statediff v0.0.0-00010101000000-000000000000
    github.com/ethereum/go-ethereum v1.12.0
-   github.com/jmoiron/sqlx v1.2.0
+   github.com/jmoiron/sqlx v1.3.5 // indirect
    github.com/prometheus/client_golang v1.16.0
    github.com/sirupsen/logrus v1.9.0
    github.com/spf13/cobra v1.3.0
    github.com/spf13/viper v1.10.1
-   github.com/vulcanize/leveldb-ethdb-rpc v0.1.2
)

require (
@ -112,7 +112,7 @@ require (
    github.com/stretchr/objx v0.5.0 // indirect
    github.com/stretchr/testify v1.8.2 // indirect
    github.com/subosito/gotenv v1.2.0 // indirect
-   github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 // indirect
+   github.com/syndtr/goleveldb v1.0.1-0.20220614013038-64ee5596c38a // indirect
    github.com/thoas/go-funk v0.9.3 // indirect
    github.com/tklauser/go-sysconf v0.3.11 // indirect
    github.com/tklauser/numcpus v0.6.1 // indirect
@ -139,9 +139,9 @@ require (

replace (
    github.com/cerc-io/eth-iterator-utils => git.vdb.to/cerc-io/eth-iterator-utils v0.0.0-20230803115933-6bb6d4e27fd2
+   // github.com/cerc-io/plugeth-statediff => ../plugeth-statediff
    // github.com/cerc-io/plugeth-statediff => git.vdb.to/cerc-io/plugeth-statediff v0.1.0
-   github.com/cerc-io/plugeth-statediff => ../plugeth-statediff
+   github.com/cerc-io/plugeth-statediff => git.vdb.to/cerc-io/plugeth-statediff v0.1.1-0.20230901070823-5ed22c03d29a
-   // github.com/ethereum/go-ethereum => git.vdb.to/cerc-io/plugeth v0.0.0-20230808125822-691dc334fab1
+   github.com/ethereum/go-ethereum => git.vdb.to/cerc-io/plugeth v0.0.0-20230808125822-691dc334fab1
-   github.com/ethereum/go-ethereum => ../plugeth
    github.com/openrelayxyz/plugeth-utils => git.vdb.to/cerc-io/plugeth-utils v0.0.0-20230706160122-cd41de354c46
)
48  go.sum
@ -48,6 +48,10 @@ cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
git.vdb.to/cerc-io/eth-iterator-utils v0.0.0-20230803115933-6bb6d4e27fd2 h1:UImCl/qXSk9WD5hG1PLS4wfjkHIdgr+5BvzsNqeWEv4=
git.vdb.to/cerc-io/eth-iterator-utils v0.0.0-20230803115933-6bb6d4e27fd2/go.mod h1:VtTq+MbVe5aKTtb/CoSySjz5vXPFOs9HPBTALmEUfj4=
+git.vdb.to/cerc-io/plugeth v0.0.0-20230808125822-691dc334fab1 h1:KLjxHwp9Zp7xhECccmJS00RiL+VwTuUGLU7qeIctg8g=
+git.vdb.to/cerc-io/plugeth v0.0.0-20230808125822-691dc334fab1/go.mod h1:cYXZu70+6xmDgIgrTD81GPasv16piiAFJnKyAbwVPMU=
+git.vdb.to/cerc-io/plugeth-statediff v0.1.1-0.20230901070823-5ed22c03d29a h1:/bCbcXZNqampJ86LqHr1U3QiKtys4w264q5jSFpuEBQ=
+git.vdb.to/cerc-io/plugeth-statediff v0.1.1-0.20230901070823-5ed22c03d29a/go.mod h1:1VcABflu6M2GvcBXu5nc+GrGcgR6/2AZXtOIjMZgceo=
git.vdb.to/cerc-io/plugeth-utils v0.0.0-20230706160122-cd41de354c46 h1:KYcbbne/RXd7AuxbUd/3hgk1jPN+33k2CKiNsUsMCC0=
git.vdb.to/cerc-io/plugeth-utils v0.0.0-20230706160122-cd41de354c46/go.mod h1:VpDN61dxy64zGff05F0adujR5enD/JEdXBkTQ+PaIsQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
@ -81,6 +85,8 @@ github.com/btcsuite/btcd/btcec/v2 v2.3.2/go.mod h1:zYzJ8etWJQIv1Ogk7OzpWjowwOdXY
github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1 h1:q0rUy8C/TYNBQS1+CGKw68tLOFYSNEs0TFnxxnS9+4U=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
+github.com/cerc-io/leveldb-ethdb-rpc v1.1.13 h1:XM+e/JLKjNoYc4Xj7DJNNlFI4+3HpqZ9VkVlrWBlwHg=
+github.com/cerc-io/leveldb-ethdb-rpc v1.1.13/go.mod h1:ZNa5ySrKJ0ZLsJ0nSNaQ+11PkX29Juy9+omMgP6mw1c=
github.com/cespare/cp v0.1.0 h1:SE+dxFebS7Iik5LK0tsi1k9ZCxEaFX4AjQmoyA+1dJk=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
@ -164,6 +170,7 @@ github.com/fjl/memsize v0.0.1/go.mod h1:VvhXpOYNQvB+uIk2RvXzuaQtkQJzzIx6lSBe1xv7
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU=
+github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU=
github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY=
github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw=
github.com/gballet/go-libpcsclite v0.0.0-20190607065134-2772fd86a8ff h1:tY80oXqGNY4FhTFhk+o9oFHGINQ/+vhlm8HFzi6znCI=
@ -191,11 +198,13 @@ github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiU
github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
-github.com/go-sql-driver/mysql v1.4.1 h1:g24URVg0OFbNUTx9qqY1IRZ9D9z3iPyi5zKhQZpNwpA=
github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
+github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
+github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-stack/stack v1.8.1 h1:ntEHSVwIt7PNXNpgPmVfMrNhLtgjlmnZha2kOpuRiDw=
github.com/go-stack/stack v1.8.1/go.mod h1:dcoOX6HbPZSZptuspn9bctJ+N/CnF5gGygcUP3XYfe4=
+github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw=
github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU=
@ -278,6 +287,7 @@ github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLe
github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
@ -425,8 +435,9 @@ github.com/jackpal/go-nat-pmp v1.0.2/go.mod h1:QPH045xvCAeXUZOxsnwmrtiCoxIr9eob+
github.com/jinzhu/gorm v1.9.12/go.mod h1:vhTjlKSJUTWNtcbQtrMBFCxy7eXTzeCAzfL5fBZT/Qs=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.0.1/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
-github.com/jmoiron/sqlx v1.2.0 h1:41Ip0zITnmWNR/vHV+S4m+VoUivnWY5E4OJfLZjCJMA=
github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks=
+github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g=
+github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
@ -500,6 +511,7 @@ github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m
github.com/mattn/go-runewidth v0.0.14 h1:+xnbZSEeDbOIg5/mE6JF0w6n9duR1l3/WmbinWVwUuU=
github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
+github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/mattn/go-sqlite3 v2.0.1+incompatible h1:xQ15muvnzGBHpIpdrNi1DA5x0+TcBZzsIDwmw9uTHzw=
github.com/mattn/go-sqlite3 v2.0.1+incompatible/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
@ -539,17 +551,22 @@ github.com/multiformats/go-multihash v0.2.3/go.mod h1:dXgKXCXjBzdscBLk9JkjINiEsC
github.com/multiformats/go-varint v0.0.7 h1:sWSGR+f/eu5ABZA2ZpYKBILXTTs9JWpdEM/nEGOHFS8=
github.com/multiformats/go-varint v0.0.7/go.mod h1:r8PUYw/fD/SjBCiKOoDlGF6QawOELpZAu9eioSos/OU=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
-github.com/nxadm/tail v1.4.4 h1:DQuhQpB1tVlglWS2hLQ5OV6B5r8aGxSrPc5Qo6uTN78=
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
+github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
+github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
-github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY=
+github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0=
github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE=
+github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU=
+github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c=
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
-github.com/onsi/gomega v1.13.0 h1:7lLHu94wT9Ij0o6EWWclhu0aOh32VxhkwEJvzuWPeak=
+github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY=
+github.com/onsi/gomega v1.19.0 h1:4ieX6qQjPP/BfC3mpsAtIGGlxTWPeA3Inl/7DtXw1tw=
+github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro=
github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs=
github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc=
@ -655,13 +672,14 @@ github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8=
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
-github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70Z7CTTCmYQn2CKbY8j86K7/FAIr141uY=
-github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc=
+github.com/syndtr/goleveldb v1.0.1-0.20220614013038-64ee5596c38a h1:1ur3QoCqvE5fl+nylMaIr9PVV1w343YRDtsy+Rwu7XI=
+github.com/syndtr/goleveldb v1.0.1-0.20220614013038-64ee5596c38a/go.mod h1:RRCYJbIwD5jmqPI9XoAFR0OcDxqUctll6zUj/+B4S48=
github.com/thoas/go-funk v0.9.3 h1:7+nAEx3kn5ZJcnDm2Bh23N2yOtweO14bi//dvRtgLpw=
github.com/thoas/go-funk v0.9.3/go.mod h1:+IWnUfUmFO1+WVYQWQtIJHeRRdaIyyYglZN7xzUPe4Q=
github.com/tklauser/go-sysconf v0.3.11 h1:89WgdJhk5SNwJfu+GKyYveZ4IaJ7xAkecBo+KdJV0CM=
@ -677,8 +695,6 @@ github.com/urfave/cli/v2 v2.25.7/go.mod h1:8qnjx1vcq5s2/wpsqoZFndg2CE5tNFyrTvS6S
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
-github.com/vulcanize/leveldb-ethdb-rpc v0.1.2 h1:ws/CsvxsPQh6IOQGsUDCPaNuPzcDjtVW1sT8o20Nfg4=
-github.com/vulcanize/leveldb-ethdb-rpc v0.1.2/go.mod h1:lksnBYanEe9GuDDeYMtpbc0JnPy4mOKImw4M8vkqJDw=
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 h1:bAn7/zixMGCfxrRTfdpNzjtPYqr8smhKouy9mxVdGPU=
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673/go.mod h1:N3UwUGtsrSj3ccvlPHLoLsHnpR27oXr4ZE984MbSER8=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
@ -806,7 +822,6 @@ golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/
golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
-golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
@ -817,8 +832,11 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v
golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8=
+golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk=
golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
+golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@ -894,16 +912,15 @@ golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200814200057-3d37ad5750ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -928,6 +945,9 @@ golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211205182925-97ca703d548d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@ -939,6 +959,7 @@ golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA=
golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.10.0 h1:3R7pNqamzBraeqj/Tj8qt1aQ2HpmlC+Cx/qL/7hn4/c=
golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@ -1010,6 +1031,7 @@ golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82u
golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
@ -1024,6 +1046,7 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
@ -1062,7 +1085,6 @@ google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
-google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=
google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
2  main.go
@ -15,7 +15,7 @@

package main

-import "github.com/vulcanize/eth-statediff-service/cmd"
+import "github.com/cerc-io/eth-statediff-service/cmd"

func main() {
    cmd.Execute()
@ -5,4 +5,5 @@ type ServiceConfig struct {
    ServiceWorkers  uint
    TrieWorkers     uint
    WorkerQueueSize uint
+   PreRuns         []RangeRequest
}
@ -17,9 +17,9 @@
package prom

import (
-   "database/sql"
-
    "github.com/prometheus/client_golang/prometheus"
+
+   dbmetrics "github.com/cerc-io/plugeth-statediff/indexer/database/metrics"
)

const (
@ -29,7 +29,7 @@ const (

// DBStatsGetter is an interface that gets sql.DBStats.
type DBStatsGetter interface {
-   Stats() sql.DBStats
+   Stats() dbmetrics.DbStats
}

// DBStatsCollector implements the prometheus.Collector interface.
@ -122,41 +122,41 @@ func (c DBStatsCollector) Collect(ch chan<- prometheus.Metric) {
    ch <- prometheus.MustNewConstMetric(
        c.maxOpenDesc,
        prometheus.GaugeValue,
-       float64(stats.MaxOpenConnections),
+       float64(stats.MaxOpen()),
    )
    ch <- prometheus.MustNewConstMetric(
        c.openDesc,
        prometheus.GaugeValue,
-       float64(stats.OpenConnections),
+       float64(stats.Open()),
    )
    ch <- prometheus.MustNewConstMetric(
        c.inUseDesc,
        prometheus.GaugeValue,
-       float64(stats.InUse),
+       float64(stats.InUse()),
    )
    ch <- prometheus.MustNewConstMetric(
        c.idleDesc,
        prometheus.GaugeValue,
-       float64(stats.Idle),
+       float64(stats.Idle()),
    )
    ch <- prometheus.MustNewConstMetric(
        c.waitedForDesc,
        prometheus.CounterValue,
-       float64(stats.WaitCount),
+       float64(stats.WaitCount()),
    )
    ch <- prometheus.MustNewConstMetric(
        c.blockedSecondsDesc,
        prometheus.CounterValue,
-       stats.WaitDuration.Seconds(),
+       stats.WaitDuration().Seconds(),
    )
    ch <- prometheus.MustNewConstMetric(
        c.closedMaxIdleDesc,
        prometheus.CounterValue,
-       float64(stats.MaxIdleClosed),
+       float64(stats.MaxIdleClosed()),
    )
    ch <- prometheus.MustNewConstMetric(
        c.closedMaxLifetimeDesc,
        prometheus.CounterValue,
-       float64(stats.MaxLifetimeClosed),
+       float64(stats.MaxLifetimeClosed()),
    )
}

@ -19,7 +19,6 @@ package prom
 import (
 	"time"
 
-	"github.com/jmoiron/sqlx"
 	"github.com/prometheus/client_golang/prometheus"
 	"github.com/prometheus/client_golang/prometheus/promauto"
 )

@ -123,7 +122,7 @@ func Init() {
 }
 
 // RegisterDBCollector create metric collector for given connection
-func RegisterDBCollector(name string, db *sqlx.DB) {
+func RegisterDBCollector(name string, db DBStatsGetter) {
 	if metrics {
 		prometheus.Register(NewDBStatsCollector(name, db))
 	}

@ -16,9 +16,11 @@
 package statediff
 
 import (
+	"errors"
 	"fmt"
 	"math/big"
 
+	"github.com/cerc-io/leveldb-ethdb-rpc/pkg/client"
 	"github.com/ethereum/go-ethereum/common"
 	"github.com/ethereum/go-ethereum/core/rawdb"
 	"github.com/ethereum/go-ethereum/core/state"
@ -26,7 +28,6 @@ import (
 	"github.com/ethereum/go-ethereum/ethdb"
 	"github.com/ethereum/go-ethereum/params"
 	"github.com/ethereum/go-ethereum/trie"
-	"github.com/vulcanize/leveldb-ethdb-rpc/pkg/client"
 )
 
 // Reader interface required by the statediffing service

@ -36,6 +37,7 @@ type Reader interface {
 	GetReceiptsByHash(hash common.Hash) (types.Receipts, error)
 	GetTdByHash(hash common.Hash) (*big.Int, error)
 	StateDB() state.Database
+	GetLatestHeader() (*types.Header, error)
 }
 
 // LvlDBReader exposes the necessary Reader methods on lvldb

@ -136,3 +138,12 @@ func (ldr *LvlDBReader) GetTdByHash(hash common.Hash) (*big.Int, error) {
 func (ldr *LvlDBReader) StateDB() state.Database {
 	return ldr.stateDB
 }
+
+// GetLatestHeader gets the latest header from the levelDB
+func (ldr *LvlDBReader) GetLatestHeader() (*types.Header, error) {
+	header := rawdb.ReadHeadHeader(ldr.ethDB)
+	if header == nil {
+		return nil, errors.New("unable to read head header")
+	}
+	return header, nil
+}

@ -24,7 +24,7 @@ import (
 	"github.com/ethereum/go-ethereum/rpc"
 	log "github.com/sirupsen/logrus"
 
-	"github.com/vulcanize/eth-statediff-service/pkg/prom"
+	"github.com/cerc-io/eth-statediff-service/pkg/prom"
 )
 
 // StartHTTPEndpoint starts the HTTP RPC endpoint, configured with cors/vhosts/modules.

@ -26,7 +26,7 @@ import (
 	"github.com/ethereum/go-ethereum/rpc"
 	log "github.com/sirupsen/logrus"
 
-	"github.com/vulcanize/eth-statediff-service/pkg/prom"
+	"github.com/cerc-io/eth-statediff-service/pkg/prom"
 )
 
 var (

@ -79,7 +79,7 @@ func StartIPCEndpoint(ipcEndpoint string, apis []rpc.API) (net.Listener, *rpc.Se
 		if err := handler.RegisterName(api.Namespace, api.Service); err != nil {
 			return nil, nil, err
 		}
-		log.Debug("IPC registered", "namespace", api.Namespace)
+		log.WithField("namespace", api.Namespace).Debug("IPC server registered")
 	}
 	// All APIs registered, start the IPC listener.
 	listener, err := ipcListen(ipcEndpoint)

121
pkg/service.go
@ -33,7 +33,7 @@ import (
 	"github.com/ethereum/go-ethereum/rpc"
 	"github.com/sirupsen/logrus"
 
-	"github.com/vulcanize/eth-statediff-service/pkg/prom"
+	"github.com/cerc-io/eth-statediff-service/pkg/prom"
 )
 
 const defaultQueueSize = 1024

@ -52,14 +52,14 @@ type Service struct {
 	queue chan RangeRequest
 	// number of ranges we can work over concurrently
 	workers uint
+	// ranges configured locally
+	preruns []RangeRequest
 }
 
 // NewStateDiffService creates a new Service
 func NewStateDiffService(lvlDBReader Reader, indexer interfaces.StateDiffIndexer, conf ServiceConfig) *Service {
-	builder := statediff.NewBuilderWithOptions(
-		adapt.GethStateView(lvlDBReader.StateDB()),
-		statediff.BuilderOptions{SubtrieWorkers: conf.TrieWorkers},
-	)
+	builder := statediff.NewBuilder(adapt.GethStateView(lvlDBReader.StateDB()))
+	builder.SetSubtrieWorkers(conf.TrieWorkers)
 	if conf.WorkerQueueSize == 0 {
 		conf.WorkerQueueSize = defaultQueueSize
 	}

@ -69,6 +69,7 @@ func NewStateDiffService(lvlDBReader Reader, indexer interfaces.StateDiffIndexer
 		indexer: indexer,
 		workers: conf.ServiceWorkers,
 		queue:   make(chan RangeRequest, conf.WorkerQueueSize),
+		preruns: conf.PreRuns,
 	}
 }
 

@ -89,9 +90,82 @@ func (sds *Service) APIs() []rpc.API {
 	}
 }
 
+func segmentRange(workers, start, stop uint64, params statediff.Params) []RangeRequest {
+	segmentSize := ((stop - start) + 1) / workers
+	remainder := ((stop - start) + 1) % workers
+	numOfSegments := workers
+	if remainder > 0 {
+		numOfSegments++
+	}
+	segments := make([]RangeRequest, numOfSegments)
+	for i := range segments {
+		end := start + segmentSize - 1
+		if end > stop {
+			end = stop
+		}
+		segments[i] = RangeRequest{start, end, params}
+		start = end + 1
+	}
+	return segments
+}
+
 // Run does a one-off processing run on the provided RangeRequests + any pre-runs, exiting afterwards
-func (sds *Service) Run(rngs []RangeRequest) error {
+func (sds *Service) Run(rngs []RangeRequest, parallel bool) error {
+	for _, preRun := range sds.preruns {
+		// if the rangeSize is smaller than the number of workers
+		// make sure we do synchronous processing to avoid quantization issues
+		rangeSize := (preRun.Stop - preRun.Start) + 1
+		numWorkers := uint64(sds.workers)
+		if rangeSize < numWorkers {
+			parallel = false
+		}
+		if parallel {
+			logrus.Infof("parallel processing prerun range (%d, %d) (%d blocks) divided into %d sized chunks with %d workers", preRun.Start, preRun.Stop,
+				rangeSize, rangeSize/numWorkers, numWorkers)
+			workChan := make(chan RangeRequest)
+			quitChan := make(chan struct{})
+			// spin up numWorkers number of worker goroutines
+			wg := new(sync.WaitGroup)
+			for i := 0; i < int(numWorkers); i++ {
+				wg.Add(1)
+				go func(id int) {
+					defer wg.Done()
+					for {
+						select {
+						case workerSegment := <-workChan:
+							for j := workerSegment.Start; j <= workerSegment.Stop; j++ {
+								if err := sds.WriteStateDiffAt(j, workerSegment.Params); err != nil {
+									logrus.Errorf("error writing statediff at height %d in range (%d, %d) : %v", id, workerSegment.Start, workerSegment.Stop, err)
+								}
+							}
+							logrus.Infof("prerun worker %d finished processing range (%d, %d)", id, workerSegment.Start, workerSegment.Stop)
+						case <-quitChan:
+							return
+						}
+					}
+				}(i)
+			}
+			// break range up into segments
+			segments := segmentRange(numWorkers, preRun.Start, preRun.Stop, preRun.Params)
+			// send the segments to the work channel
+			for _, segment := range segments {
+				workChan <- segment
+			}
+			close(quitChan)
+			wg.Wait()
+		} else {
+			logrus.Infof("sequential processing prerun range (%d, %d)", preRun.Start, preRun.Stop)
+			for i := preRun.Start; i <= preRun.Stop; i++ {
+				if err := sds.WriteStateDiffAt(i, preRun.Params); err != nil {
+					return fmt.Errorf("error writing statediff at height %d in range (%d, %d) : %v", i, preRun.Start, preRun.Stop, err)
+				}
+			}
+		}
+	}
+	sds.preruns = nil
+	// At present this code is never called so we have not written the parallel version:
 	for _, rng := range rngs {
+		logrus.Infof("processing requested range (%d, %d)", rng.Start, rng.Stop)
 		for i := rng.Start; i <= rng.Stop; i++ {
 			if err := sds.WriteStateDiffAt(i, rng.Params); err != nil {
 				return fmt.Errorf("error writing statediff at height %d in range (%d, %d) : %v", i, rng.Start, rng.Stop, err)
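
To make the segmentation arithmetic used by segmentRange above concrete, here is a minimal standalone sketch (illustrative only, not code from this diff; the segment and split names are made up for the sketch, and RangeRequest is reduced to just its block bounds):

package main

import "fmt"

// segment stands in for RangeRequest with only the block bounds.
type segment struct{ Start, Stop uint64 }

// split mirrors the arithmetic of segmentRange: equal-sized chunks per worker,
// plus one shorter trailing segment when the range does not divide evenly.
func split(workers, start, stop uint64) []segment {
	segmentSize := ((stop - start) + 1) / workers
	remainder := ((stop - start) + 1) % workers
	numOfSegments := workers
	if remainder > 0 {
		numOfSegments++
	}
	segments := make([]segment, numOfSegments)
	for i := range segments {
		end := start + segmentSize - 1
		if end > stop {
			end = stop
		}
		segments[i] = segment{start, end}
		start = end + 1
	}
	return segments
}

func main() {
	// Blocks 0..10 across 3 workers: segment size 11/3 = 3 with remainder 2,
	// so four segments come out: [0,2] [3,5] [6,8] [9,10].
	fmt.Println(split(3, 0, 10))
}

Each segment is then pushed onto workChan, so when the remainder produces an extra segment it simply becomes one more unit of work for whichever prerun worker frees up first.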

@ -115,30 +189,35 @@ func (sds *Service) Loop(wg *sync.WaitGroup) error {
 			for {
 				select {
 				case blockRange := <-sds.queue:
-					logrus.Infof("service worker %d received range (%d, %d) off of work queue, beginning processing", id, blockRange.Start, blockRange.Stop)
+					log := logrus.WithField("range", blockRange).WithField("worker", id)
+					log.Debug("processing range")
 					prom.DecQueuedRanges()
 					for j := blockRange.Start; j <= blockRange.Stop; j++ {
 						if err := sds.WriteStateDiffAt(j, blockRange.Params); err != nil {
-							logrus.Errorf("service worker %d error writing statediff at height %d in range (%d, %d) : %v", id, j, blockRange.Start, blockRange.Stop, err)
+							log.Errorf("error writing statediff at block %d: %v", j, err)
 						}
 						select {
 						case <-sds.quitChan:
-							logrus.Infof("closing service worker %d\n"+
-								"working in range (%d, %d)\n"+
-								"last processed height: %d", id, blockRange.Start, blockRange.Stop, j)
+							log.Infof("closing service worker (last processed block: %d)", j)
 							return
 						default:
-							logrus.Infof("service worker %d finished processing statediff height %d in range (%d, %d)", id, j, blockRange.Start, blockRange.Stop)
+							log.Infof("Finished processing block %d", j)
 						}
 					}
-					logrus.Infof("service worker %d finished processing range (%d, %d)", id, blockRange.Start, blockRange.Stop)
+					log.Debugf("Finished processing range")
 				case <-sds.quitChan:
-					logrus.Infof("closing the statediff service loop worker %d", id)
+					logrus.Debugf("closing the statediff service loop worker %d", id)
 					return
 				}
 			}
 		}(i)
 	}
+	for _, preRun := range sds.preruns {
+		if err := sds.WriteStateDiffsInRange(preRun.Start, preRun.Stop, preRun.Params); err != nil {
+			close(sds.quitChan)
+			return err
+		}
+	}
 	return nil
 }
 

@ -151,7 +230,7 @@ func (sds *Service) StateDiffAt(blockNumber uint64, params statediff.Params) (*s
 	}
 	logrus.Infof("sending state diff at block %d", blockNumber)
 
-	// compute leaf keys of watched addresses in the params
+	// compute leaf paths of watched addresses in the params
 	params.ComputeWatchedAddressesLeafPaths()
 
 	if blockNumber == 0 {

@ -171,9 +250,9 @@ func (sds *Service) StateDiffFor(blockHash common.Hash, params statediff.Params)
 	if err != nil {
 		return nil, err
 	}
-	logrus.Infof("sending state diff at block %s", blockHash.Hex())
+	logrus.Infof("sending state diff at block %s", blockHash)
 
-	// compute leaf keys of watched addresses in the params
+	// compute leaf paths of watched addresses in the params
 	params.ComputeWatchedAddressesLeafPaths()
 
 	if currentBlock.NumberU64() == 0 {

@ -261,7 +340,7 @@ func (sds *Service) WriteStateDiffAt(blockNumber uint64, params statediff.Params
 		return err
 	}
 
-	// compute leaf keys of watched addresses in the params
+	// compute leaf paths of watched addresses in the params
 	params.ComputeWatchedAddressesLeafPaths()
 
 	parentRoot := common.Hash{}

@ -279,14 +358,14 @@ func (sds *Service) WriteStateDiffAt(blockNumber uint64, params statediff.Params
 // This operation cannot be performed back past the point of db pruning; it requires an archival node
 // for historical data
 func (sds *Service) WriteStateDiffFor(blockHash common.Hash, params statediff.Params) error {
-	logrus.Infof("Writing state diff for block %s", blockHash.Hex())
+	logrus.Infof("Writing state diff for block %s", blockHash)
 	t := time.Now()
 	currentBlock, err := sds.lvlDBReader.GetBlockByHash(blockHash)
 	if err != nil {
 		return err
 	}
 
-	// compute leaf keys of watched addresses in the params
+	// compute leaf paths of watched addresses in the params
 	params.ComputeWatchedAddressesLeafPaths()
 
 	parentRoot := common.Hash{}

@ -357,7 +436,7 @@ func (sds *Service) WriteStateDiffsInRange(start, stop uint64, params statediff.
 	select {
 	case sds.queue <- RangeRequest{Start: start, Stop: stop, Params: params}:
 		prom.IncQueuedRanges()
-		logrus.Infof("added range (%d, %d) to the worker queue", start, stop)
+		logrus.Infof("Added range (%d, %d) to the worker queue", start, stop)
 		return nil
 	case <-blocked.C:
 		return fmt.Errorf("unable to add range (%d, %d) to the worker queue", start, stop)

@ -20,6 +20,8 @@
 package statediff
 
 import (
+	"fmt"
+
 	sd "github.com/cerc-io/plugeth-statediff"
 )
 

@ -28,3 +30,7 @@ type RangeRequest struct {
 	Start, Stop uint64
 	Params      sd.Params
 }
+
+func (r RangeRequest) String() string {
+	return fmt.Sprintf("[%d,%d]", r.Start, r.Stop)
+}

28
scripts/.env.example
Normal file
@ -0,0 +1,28 @@
# Used by the script to count rows (count-lines.sh)
COUNT_LINES_LOG=./count-lines.log
COUNT_LINES_INPUT_DIR=~/eth-statediff-service/output_dir
COUNT_LINES_OUTPUT_FILE=./output-stats.txt

# Used by the script to dedup output files (dedup.sh)
DEDUP_LOG=./dedup.log
DEDUP_INPUT_DIR=~/eth-statediff-service/output_dir
DEDUP_OUTPUT_DIR=~/eth-statediff-service/dedup_dir
DEDUP_SORT_DIR=./.sort

# Used by the script to perform column checks (check-columns.sh)
CHECK_COLUMNS_LOG=./check-columns.log
CHECK_COLUMNS_INPUT_DIR=~/eth-statediff-service/output_dir
CHECK_COLUMNS_INPUT_DEDUP_DIR=~/eth-statediff-service/dedup_dir
CHECK_COLUMNS_OUTPUT_DIR=./check-columns

# Used by the script to import data (timescaledb-import.sh)
IMPORT_LOG=./tsdb-import.log
IMPORT_INPUT_DIR=~/eth-statediff-service/output_dir
IMPORT_INPUT_DEDUP_DIR=~/eth-statediff-service/dedup_dir
TIMESCALEDB_WORKERS=8

DATABASE_USER=vdbm
DATABASE_HOSTNAME=localhost
DATABASE_PORT=8077
DATABASE_NAME=vulcanize_testing
DATABASE_PASSWORD=password
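
Each of the scripts below reads its settings from an env file passed as the first argument (ENV=$1), so a typical invocation, assuming the example file has been copied and adjusted first, would be: cp scripts/.env.example scripts/.env && ./scripts/count-lines.sh scripts/.env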

58
scripts/check-columns.sh
Executable file
@ -0,0 +1,58 @@
#!/bin/bash

# Requires:
# CHECK_COLUMNS_LOG
# CHECK_COLUMNS_INPUT_DIR
# CHECK_COLUMNS_INPUT_DEDUP_DIR
# CHECK_COLUMNS_OUTPUT_DIR

# env file arg
ENV=$1
echo "Using env file: ${ENV}"

# read env file
export $(grep -v '^#' ${ENV} | xargs)

# redirect stdout/stderr to a file
exec >"${CHECK_COLUMNS_LOG}" 2>&1

# create output dir if not exists
mkdir -p "${CHECK_COLUMNS_OUTPUT_DIR}"

start_timestamp=$(date +%s)

declare -A expected_columns
expected_columns=(
  ["public.nodes"]="5"
  ["public.blocks"]="3"
  # ["eth.access_list_elements"]="?" # skipping as values include ','
  ["eth.log_cids"]="12"
  ["eth.state_accounts"]="7"
  ["eth.storage_cids"]="9"
  ["eth.uncle_cids"]="7"
  ["eth.header_cids"]="16"
  ["eth.receipt_cids"]="10"
  ["eth.state_cids"]="8"
  ["eth.transaction_cids"]="11"
)

for table_name in "${!expected_columns[@]}";
do
  if [ "${table_name}" = "public.blocks" ];
  then
    command="$(dirname "$0")/find-bad-rows.sh -i ${CHECK_COLUMNS_INPUT_DEDUP_DIR}/deduped-${table_name}.csv -c ${expected_columns[${table_name}]} -d true -o ${CHECK_COLUMNS_OUTPUT_DIR}/${table_name}.txt"
  else
    command="$(dirname "$0")/find-bad-rows.sh -i ${CHECK_COLUMNS_INPUT_DIR}/${table_name}.csv -c ${expected_columns[${table_name}]} -d true -o ${CHECK_COLUMNS_OUTPUT_DIR}/${table_name}.txt"
  fi

  echo "${table_name}"
  echo Start: "$(date)"
  eval "${command}"
  echo End: "$(date)"
  echo Total bad rows: $(wc -l ${CHECK_COLUMNS_OUTPUT_DIR}/${table_name}.txt)
  echo
done

difference=$(($(date +%s)-start_timestamp))
echo Time taken: $((difference/86400)):$(date -d@${difference} -u +%H:%M:%S)
echo

46
scripts/count-lines.sh
Executable file
@ -0,0 +1,46 @@
#!/bin/bash

# Requires:
# COUNT_LINES_LOG
# COUNT_LINES_INPUT_DIR
# COUNT_LINES_OUTPUT_FILE

# env file arg
ENV=$1
echo "Using env file: ${ENV}"

# read env file
export $(grep -v '^#' ${ENV} | xargs)

# redirect stdout/stderr to a file
exec >"${COUNT_LINES_LOG}" 2>&1

start_timestamp=$(date +%s)

table_names=(
  "public.nodes"
  "public.blocks"
  "eth.access_list_elements"
  "eth.log_cids"
  "eth.state_accounts"
  "eth.storage_cids"
  "eth.uncle_cids"
  "eth.header_cids"
  "eth.receipt_cids"
  "eth.state_cids"
  "eth.transaction_cids"
)

echo "Row counts:" > "${COUNT_LINES_OUTPUT_FILE}"

for table_name in "${table_names[@]}";
do
  echo "${table_name}";
  echo Start: "$(date)"
  wc -l "${COUNT_LINES_INPUT_DIR}"/"${table_name}.csv" >> "${COUNT_LINES_OUTPUT_FILE}"
  echo End: "$(date)"
  echo
done

difference=$(($(date +%s)-start_timestamp))
echo Time taken: $((difference/86400)):$(date -d@${difference} -u +%H:%M:%S)

35
scripts/dedup.sh
Executable file
@ -0,0 +1,35 @@
#!/bin/bash

# Requires:
# DEDUP_LOG
# DEDUP_INPUT_DIR
# DEDUP_OUTPUT_DIR
# DEDUP_SORT_DIR

# env file arg
ENV=$1
echo "Using env file: ${ENV}"

# read env file
export $(grep -v '^#' ${ENV} | xargs)

# redirect stdout/stderr to a file
exec >"${DEDUP_LOG}" 2>&1

# create output dir if not exists
mkdir -p "${DEDUP_OUTPUT_DIR}"

start_timestamp=$(date +%s)

echo "public.blocks"
echo Start: "$(date)"
sort -T "${DEDUP_SORT_DIR}" -u "${DEDUP_INPUT_DIR}"/public.blocks.csv -o "${DEDUP_OUTPUT_DIR}"/deduped-public.blocks.csv
echo End: "$(date)"
echo Total deduped rows: $(wc -l ${DEDUP_OUTPUT_DIR}/deduped-public.blocks.csv)
echo

difference=$(($(date +%s)-start_timestamp))
echo Time taken: $((difference/86400)):$(date -d@${difference} -u +%H:%M:%S)

# NOTE: This script currently only dedups public.blocks output file.
# If the output contains blocks that were statediffed more than once, output files for other tables will have to be deduped as well.

43
scripts/find-bad-rows.sh
Executable file
@ -0,0 +1,43 @@
#!/bin/bash

# flags
# -i <input-file>: Input data file path
# -c <expected-columns>: Expected number of columns in each row of the input file
# -o [output-file]: Output destination file path (default: STDOUT)
# -d [include-data]: Whether to include the data row in output (true | false) (default: false)

# eg: ./scripts/find-bad-rows.sh -i eth.state_cids.csv -c 8 -o res.txt -d true
# output: 1 9 1500000,xxxxxxxx,0x83952d392f9b0059eea94b10d1a095eefb1943ea91595a16c6698757127d4e1c,,
# baglacgzasvqcntdahkxhufdnkm7a22s2eetj6mx6nzkarwxtkvy4x3bubdgq,\x0f,0,f,/blocks/,
# DMQJKYBGZRQDVLT2CRWVGPQNNJNCCJU7GL7G4VAI3LZVK4OL5Q2ARTI

while getopts i:c:o:d: OPTION
do
  case "${OPTION}" in
    i) inputFile=${OPTARG};;
    c) expectedColumns=${OPTARG};;
    o) outputFile=${OPTARG};;
    d) data=${OPTARG};;
  esac
done

timestamp=$(date +%s)

# if data requested, dump row number, number of columns and the row
if [ "${data}" = true ] ; then
  if [ -z "${outputFile}" ]; then
    awk -F"," "NF!=${expectedColumns} {print NR, NF, \$0}" < ${inputFile}
  else
    awk -F"," "NF!=${expectedColumns} {print NR, NF, \$0}" < ${inputFile} > ${outputFile}
  fi
# else, dump only row number, number of columns
else
  if [ -z "${outputFile}" ]; then
    awk -F"," "NF!=${expectedColumns} {print NR, NF}" < ${inputFile}
  else
    awk -F"," "NF!=${expectedColumns} {print NR, NF}" < ${inputFile} > ${outputFile}
  fi
fi

difference=$(($(date +%s)-timestamp))
echo Time taken: $(date -d@${difference} -u +%H:%M:%S)

22
scripts/request-range.sh
Executable file
@ -0,0 +1,22 @@
#!/bin/bash

set -eu

FROM=$1
TO=$2
URL=127.0.0.1:8545

DATA='{
  "jsonrpc": "2.0",
  "method": "statediff_writeStateDiffsInRange",
  "params": ['"$FROM"', '"$TO"', {
      "includeBlock": true,
      "includeReceipts": true,
      "includeTD": true,
      "includeCode": true
    }
  ],
  "id": 1
}'

exec curl -s $URL -X POST -H 'Content-Type: application/json' --data "$DATA"
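
The script takes the start and end block numbers as positional arguments and posts a statediff_writeStateDiffsInRange request to the service's HTTP endpoint at 127.0.0.1:8545; for example, with purely illustrative block numbers: ./scripts/request-range.sh 1000000 1000100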

75
scripts/timescaledb-import.sh
Executable file
@ -0,0 +1,75 @@
#!/bin/bash

# Requires:
# IMPORT_LOG
# IMPORT_INPUT_DIR
# IMPORT_INPUT_DEDUP_DIR
# TIMESCALEDB_WORKERS
# DATABASE_USER
# DATABASE_HOSTNAME
# DATABASE_PORT
# DATABASE_NAME
# DATABASE_PASSWORD

DEFAULT_TIMESCALEDB_WORKERS=8

# env file arg
ENV=$1
echo "Using env file: ${ENV}"

# read env file
export $(grep -v '^#' ${ENV} | xargs)

if [ "$TIMESCALEDB_WORKERS" = "" ]; then
  TIMESCALEDB_WORKERS=$DEFAULT_TIMESCALEDB_WORKERS
fi

# redirect stdout/stderr to a file
exec >"${IMPORT_LOG}" 2>&1

start_timestamp=$(date +%s)

declare -a tables
# schema-table-copyOptions
tables=(
  "public-nodes"
  "public-blocks"
  "eth-access_list_elements"
  "eth-log_cids-FORCE NOT NULL topic0, topic1, topic2, topic3 CSV"
  "eth-state_accounts"
  "eth-storage_cids-FORCE NOT NULL storage_leaf_key CSV"
  "eth-uncle_cids"
  "eth-header_cids"
  "eth-receipt_cids-FORCE NOT NULL post_state, contract, contract_hash CSV"
  "eth-state_cids-FORCE NOT NULL state_leaf_key CSV"
  "eth-transaction_cids-FORCE NOT NULL dst CSV"
)

for elem in "${tables[@]}";
do
  IFS='-' read -a arr <<< "${elem}"

  if [ "${arr[0]}.${arr[1]}" = "public.blocks" ];
  then
    copy_command="timescaledb-parallel-copy --connection \"host=${DATABASE_HOSTNAME} port=${DATABASE_PORT} user=${DATABASE_USER} password=${DATABASE_PASSWORD} sslmode=disable\" --db-name ${DATABASE_NAME} --schema ${arr[0]} --table ${arr[1]} --file ${IMPORT_INPUT_DEDUP_DIR}/deduped-${arr[0]}.${arr[1]}.csv --workers ${TIMESCALEDB_WORKERS} --reporting-period 300s"
  else
    copy_command="timescaledb-parallel-copy --connection \"host=${DATABASE_HOSTNAME} port=${DATABASE_PORT} user=${DATABASE_USER} password=${DATABASE_PASSWORD} sslmode=disable\" --db-name ${DATABASE_NAME} --schema ${arr[0]} --table ${arr[1]} --file ${IMPORT_INPUT_DIR}/${arr[0]}.${arr[1]}.csv --workers ${TIMESCALEDB_WORKERS} --reporting-period 300s"
  fi

  if [ "${arr[2]}" != "" ];
  then
    copy_with_options="${copy_command} --copy-options \"${arr[2]}\""
  else
    copy_with_options=${copy_command}
  fi

  echo "${arr[0]}.${arr[1]}"
  echo Start: "$(date)"
  eval "${copy_with_options}"
  echo End: "$(date)"
  echo
done

difference=$(($(date +%s)-start_timestamp))
echo Time taken: $((difference/86400)):$(date -d@${difference} -u +%H:%M:%S)
echo
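
Note that this import script shells out to timescaledb-parallel-copy, so it assumes Timescale's parallel COPY tool is installed and on the PATH of the host running the import; for public.blocks it expects the deduped-public.blocks.csv file produced by dedup.sh above, while the other tables are loaded straight from the raw output directory.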

@ -17,4 +17,4 @@ mkdir -p /app/geth-rw && \
   sudo mount -t overlay overlay -o lowerdir=/app/geth-ro,upperdir=/tmp/overlay/upper,workdir=/tmp/overlay/work /app/geth-rw && \
 
 echo "Running the statediff service" && \
-  exec sudo ./eth-statediff-service "$VDB_COMMAND" --config=config.toml
+  exec sudo ./eth-statediff-service "$VDB_COMMAND" --config=/config/config.toml
16
test/ci-chain.json
Normal file
16
test/ci-chain.json
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"chainId": 41337,
|
||||||
|
"homesteadBlock": 0,
|
||||||
|
"eip150Block": 0,
|
||||||
|
"eip150Hash": "0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||||
|
"eip155Block": 0,
|
||||||
|
"eip158Block": 0,
|
||||||
|
"byzantiumBlock": 0,
|
||||||
|
"constantinopleBlock": 0,
|
||||||
|
"petersburgBlock": 0,
|
||||||
|
"istanbulBlock": 0,
|
||||||
|
"clique": {
|
||||||
|
"period": 5,
|
||||||
|
"epoch": 30000
|
||||||
|
}
|
||||||
|
}
|

36
test/ci-config.toml
Normal file
@ -0,0 +1,36 @@
[leveldb]
mode = "local"
url = "http://127.0.0.1:8082/"

[server]
ipcPath = ".ipc"
httpPath = "0.0.0.0:8545"

[statediff]
serviceWorkers = 1
workerQueueSize = 1024
trieWorkers = 4

[log]
level = "debug"

[database]
name = "cerc_testing"
hostname = "localhost"
port = 8077
user = "vdbm"
password = "password"
type = "postgres"
driver = "sqlx"

[cache]
database = 1024
trie = 1024

[ethereum]
chainConfig = "test/ci-chain.json"
nodeID = ""
clientName = "eth-statediff-service"
genesisBlock = "0x37cbb63c7150a7b60f2878433963ed8ba7e5f82fb2683ec7a945c974e1cf4e05"
networkID = 1
chainID = 41337

23
test/compose.yml
Normal file
@ -0,0 +1,23 @@
services:
  migrations:
    restart: on-failure
    depends_on:
      - ipld-eth-db
    image: git.vdb.to/cerc-io/ipld-eth-db/ipld-eth-db:v5.0.5-alpha
    environment:
      DATABASE_USER: "vdbm"
      DATABASE_NAME: "cerc_testing"
      DATABASE_PASSWORD: "password"
      DATABASE_HOSTNAME: "ipld-eth-db"
      DATABASE_PORT: 5432

  ipld-eth-db:
    image: timescale/timescaledb:latest-pg14
    restart: always
    command: ["postgres", "-c", "log_statement=all"]
    environment:
      POSTGRES_USER: "vdbm"
      POSTGRES_DB: "cerc_testing"
      POSTGRES_PASSWORD: "password"
    ports:
      - 127.0.0.1:8077:5432
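
To stand the test database up locally, something along the lines of docker compose -f test/compose.yml up should work: it exposes Postgres (TimescaleDB) on 127.0.0.1:8077 with the vdbm/password credentials and cerc_testing database that test/ci-config.toml expects, and runs the ipld-eth-db migrations against it on startup.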