diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 00000000..f9be1e6b
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,21 @@
+name: Lint Checks
+
+on:
+ pull_request:
+ branches: '*'
+ push:
+ branches: '*'
+
+jobs:
+ test:
+ name: "Run linter"
+ runs-on: ubuntu-latest
+ steps:
+ - name: "Clone project repository"
+ uses: actions/checkout@v3
+ - name: "Install Python"
+ uses: actions/setup-python@v4
+ with:
+ python-version: '3.8'
+ - name: "Run flake8"
+ uses: py-actions/flake8@v2
diff --git a/README.md b/README.md
index 8dd6f041..ac1beaa2 100644
--- a/README.md
+++ b/README.md
@@ -16,6 +16,7 @@ Ensure that the following are already installed:
- [Python3](https://wiki.python.org/moin/BeginnersGuide/Download): `python3 --version` >= `3.8.10` (the Python3 shipped in Ubuntu 20+ is good to go)
- [Docker](https://docs.docker.com/get-docker/): `docker --version` >= `20.10.21`
- [jq](https://stedolan.github.io/jq/download/): `jq --version` >= `1.5`
+- [git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git): `git --version` >= `2.10.3`
Note: if installing docker-compose via package manager on Linux (as opposed to Docker Desktop), you must [install the plugin](https://docs.docker.com/compose/install/linux/#install-the-plugin-manually), e.g. :
@@ -48,6 +49,18 @@ Verify operation (your version will probably be different, just check here that
laconic-so version
Version: 1.1.0-7a607c2-202304260513
```
+Save the distribution URL to `~/.laconic-so/config.yml`:
+```bash
+mkdir -p ~/.laconic-so
+echo "distribution-url: https://github.com/cerc-io/stack-orchestrator/releases/latest/download/laconic-so" > ~/.laconic-so/config.yml
+```
+
+### Update
+If Stack Orchestrator was installed using the process described above, it can subsequently self-update to the latest version by running:
+
+```bash
+laconic-so update
+```
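+
+To check which distribution URL an update will download from (a quick sanity check; this assumes the config file created above):
+
+```bash
+cat ~/.laconic-so/config.yml
+```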
## Usage
diff --git a/app/base.py b/app/base.py
index abb1fa9d..cc20da1b 100644
--- a/app/base.py
+++ b/app/base.py
@@ -1,4 +1,4 @@
-# Copyright © 2022, 2023 Cerc
+# Copyright © 2022, 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -63,7 +63,8 @@ class package_registry_stack(base_stack):
self.url = "http://gitea.local:3000/api/packages/cerc-io/npm/"
else:
# If not, print a message about how to start it and return fail to the caller
- print("ERROR: The package-registry stack is not running, and no external registry specified with CERC_NPM_REGISTRY_URL")
+ print("ERROR: The package-registry stack is not running, and no external registry "
+ "specified with CERC_NPM_REGISTRY_URL")
print("ERROR: Start the local package registry with: laconic-so --stack package-registry deploy-system up")
return False
return True
@@ -75,5 +76,7 @@ class package_registry_stack(base_stack):
def get_npm_registry_url():
# If an auth token is not defined, we assume the default should be the cerc registry
# If an auth token is defined, we assume the local gitea should be used.
- default_npm_registry_url = "http://gitea.local:3000/api/packages/cerc-io/npm/" if config("CERC_NPM_AUTH_TOKEN", default=None) else "https://git.vdb.to/api/packages/cerc-io/npm/"
+ default_npm_registry_url = "http://gitea.local:3000/api/packages/cerc-io/npm/" if config(
+ "CERC_NPM_AUTH_TOKEN", default=None
+ ) else "https://git.vdb.to/api/packages/cerc-io/npm/"
return config("CERC_NPM_REGISTRY_URL", default=default_npm_registry_url)
diff --git a/app/build_containers.py b/app/build_containers.py
index b3b6295e..0187bb5e 100644
--- a/app/build_containers.py
+++ b/app/build_containers.py
@@ -1,4 +1,4 @@
-# Copyright © 2022, 2023 Cerc
+# Copyright © 2022, 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -86,6 +86,7 @@ def command(ctx, include, exclude, force_rebuild, extra_build_args):
# TODO: make this configurable
container_build_env = {
"CERC_NPM_REGISTRY_URL": get_npm_registry_url(),
+ "CERC_GO_AUTH_TOKEN": config("CERC_GO_AUTH_TOKEN", default=""),
"CERC_NPM_AUTH_TOKEN": config("CERC_NPM_AUTH_TOKEN", default=""),
"CERC_REPO_BASE_DIR": dev_root_path,
"CERC_CONTAINER_BASE_DIR": container_build_dir,
diff --git a/app/build_npms.py b/app/build_npms.py
index 6555ba91..2ffbea1b 100644
--- a/app/build_npms.py
+++ b/app/build_npms.py
@@ -1,4 +1,4 @@
-# Copyright © 2022, 2023 Cerc
+# Copyright © 2022, 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -30,10 +30,12 @@ from app.util import include_exclude_check, get_parsed_stack_config
builder_js_image_name = "cerc/builder-js:local"
+
@click.command()
@click.option('--include', help="only build these packages")
@click.option('--exclude', help="don\'t build these packages")
-@click.option("--force-rebuild", is_flag=True, default=False, help="Override existing target package version check -- force rebuild")
+@click.option("--force-rebuild", is_flag=True, default=False,
+ help="Override existing target package version check -- force rebuild")
@click.option("--extra-build-args", help="Supply extra arguments to build")
@click.pass_context
def command(ctx, include, exclude, force_rebuild, extra_build_args):
@@ -122,7 +124,7 @@ def command(ctx, include, exclude, force_rebuild, extra_build_args):
# envs = {"CERC_NPM_AUTH_TOKEN": npm_registry_url_token} | ({"CERC_SCRIPT_DEBUG": "true"} if debug else {})
# but that isn't available in Python 3.8 (default in Ubuntu 20) so for now we use dict.update:
envs = {"CERC_NPM_AUTH_TOKEN": npm_registry_url_token,
- "LACONIC_HOSTED_CONFIG_FILE": "config-hosted.yml" # Convention used by our web app packages
+ "LACONIC_HOSTED_CONFIG_FILE": "config-hosted.yml" # Convention used by our web app packages
}
envs.update({"CERC_SCRIPT_DEBUG": "true"} if debug else {})
envs.update({"CERC_FORCE_REBUILD": "true"} if force_rebuild else {})
diff --git a/app/command_types.py b/app/command_types.py
new file mode 100644
index 00000000..44df9e4b
--- /dev/null
+++ b/app/command_types.py
@@ -0,0 +1,27 @@
+# Copyright © 2023 Vulcanize
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+from dataclasses import dataclass
+
+
+@dataclass
+class CommandOptions:
+ stack: str
+ quiet: bool = False
+ verbose: bool = False
+ dry_run: bool = False
+ local_stack: bool = False
+ debug: bool = False
+ continue_on_error: bool = False
diff --git a/app/data/compose/docker-compose-fixturenet-eth.yml b/app/data/compose/docker-compose-fixturenet-eth.yml
index 663b53c5..bd25c528 100644
--- a/app/data/compose/docker-compose-fixturenet-eth.yml
+++ b/app/data/compose/docker-compose-fixturenet-eth.yml
@@ -40,6 +40,7 @@ services:
- fixturenet-eth-bootnode-geth
ports:
- "8545"
+ - "8546"
- "40000"
- "6060"
diff --git a/app/data/compose/docker-compose-fixturenet-laconic-console.yml b/app/data/compose/docker-compose-fixturenet-laconic-console.yml
index 23185957..da2fd95f 100644
--- a/app/data/compose/docker-compose-fixturenet-laconic-console.yml
+++ b/app/data/compose/docker-compose-fixturenet-laconic-console.yml
@@ -3,6 +3,7 @@ services:
restart: unless-stopped
image: cerc/laconic-console-host:local
environment:
+ - CERC_WEBAPP_FILES_DIR=${CERC_WEBAPP_FILES_DIR:-/usr/local/share/.config/yarn/global/node_modules/@cerc-io/console-app/dist/production}
- LACONIC_HOSTED_ENDPOINT=${LACONIC_HOSTED_ENDPOINT:-http://localhost}
ports:
- "80"
diff --git a/app/data/compose/docker-compose-fixturenet-laconicd.yml b/app/data/compose/docker-compose-fixturenet-laconicd.yml
index 5037687c..641229d4 100644
--- a/app/data/compose/docker-compose-fixturenet-laconicd.yml
+++ b/app/data/compose/docker-compose-fixturenet-laconicd.yml
@@ -15,7 +15,7 @@ services:
- "6060"
- "26657"
- "26656"
- - "9473:9473"
+ - "9473"
- "8545"
- "8546"
- "9090"
diff --git a/app/data/compose/docker-compose-fixturenet-plugeth.yml b/app/data/compose/docker-compose-fixturenet-plugeth.yml
index 598b25ea..f31ef559 100644
--- a/app/data/compose/docker-compose-fixturenet-plugeth.yml
+++ b/app/data/compose/docker-compose-fixturenet-plugeth.yml
@@ -20,7 +20,7 @@ services:
- SYS_PTRACE
environment:
CERC_REMOTE_DEBUG: ${CERC_REMOTE_DEBUG:-true}
- CERC_RUN_STATEDIFF: "detect"
+ CERC_RUN_STATEDIFF: ${CERC_RUN_STATEDIFF:-detect}
CERC_STATEDIFF_DB_NODE_ID: 1
CERC_SCRIPT_DEBUG: ${CERC_SCRIPT_DEBUG}
env_file:
diff --git a/app/data/compose/docker-compose-fixturenet-sushiswap-subgraph-v3.yml b/app/data/compose/docker-compose-fixturenet-sushiswap-subgraph-v3.yml
new file mode 100644
index 00000000..acdf47ed
--- /dev/null
+++ b/app/data/compose/docker-compose-fixturenet-sushiswap-subgraph-v3.yml
@@ -0,0 +1,27 @@
+version: '3.2'
+
+services:
+ # Deploys the sushiswap v3 subgraph
+ sushiswap-subgraph-v3:
+ image: cerc/sushiswap-subgraphs:local
+ restart: on-failure
+ depends_on:
+ graph-node:
+ condition: service_healthy
+ environment:
+ - APP=v3
+ - NETWORK=lotus-fixturenet
+ command: ["bash", "-c", "./v3/run-v3.sh && ./blocks/run-blocks.sh"]
+ working_dir: /app/subgraphs
+ volumes:
+ - ../config/fixturenet-sushiswap-subgraph-v3/lotus-fixturenet.js.template:/app/config/lotus-fixturenet.js.template
+ - ../config/fixturenet-sushiswap-subgraph-v3/run-blocks.sh:/app/subgraphs/blocks/run-blocks.sh
+ - ../config/fixturenet-sushiswap-subgraph-v3/run-v3.sh:/app/subgraphs/v3/run-v3.sh
+ - sushiswap_core_deployment:/app/subgraphs/v3/core-deployments/docker
+ - sushiswap_periphery_deployment:/app/subgraphs/v3/periphery-deployments/docker
+ extra_hosts:
+ - "host.docker.internal:host-gateway"
+
+volumes:
+ sushiswap_core_deployment:
+ sushiswap_periphery_deployment:
diff --git a/app/data/compose/docker-compose-go-nitro.yml b/app/data/compose/docker-compose-go-nitro.yml
new file mode 100644
index 00000000..414a83ce
--- /dev/null
+++ b/app/data/compose/docker-compose-go-nitro.yml
@@ -0,0 +1,39 @@
+version: '3.7'
+
+services:
+ go-nitro:
+ image: cerc/go-nitro:local
+ hostname: go-nitro
+ restart: on-failure
+ depends_on:
+ # Wait for Nitro contracts to be deployed
+ nitro-contracts:
+ condition: service_completed_successfully
+ environment:
+ NITRO_CHAIN_URL: ${NITRO_CHAIN_URL:-ws://fixturenet-eth-geth-1:8546}
+ NITRO_PK: ${NITRO_PK:-2d999770f7b5d49b694080f987b82bbc9fc9ac2b4dcc10b0f8aba7d700f69c6d}
+ NITRO_CHAIN_PK: ${NITRO_CHAIN_PK:-570b909da9669b2f35a0b1ac70b8358516d55ae1b5b3710e95e9a94395090597}
+ NITRO_USE_DURABLE_STORE: ${NITRO_USE_DURABLE_STORE:-true}
+ NITRO_DURABLE_STORE_FOLDER: ${NITRO_DURABLE_STORE_FOLDER:-/app/data/nitro-store}
+ CERC_NA_ADDRESS: ${CERC_NA_ADDRESS}
+ CERC_VPA_ADDRESS: ${CERC_VPA_ADDRESS}
+ CERC_CA_ADDRESS: ${CERC_CA_ADDRESS}
+ entrypoint: ["bash", "-c", "/app/run-nitro-node.sh"]
+ volumes:
+ - go_nitro_data:/app/data
+ - nitro_deployment:/app/deployment
+ - ../config/go-nitro/run-nitro-node.sh:/app/run-nitro-node.sh
+ healthcheck:
+ test: ["CMD", "nc", "-vz", "localhost", "4005"]
+ interval: 10s
+ timeout: 5s
+ retries: 10
+ start_period: 10s
+ ports:
+ - "3005"
+ - "4005"
+ - "5005:5005"
+
+volumes:
+ go_nitro_data:
+ nitro_deployment:
diff --git a/app/data/compose/docker-compose-fixturenet-graph-node.yml b/app/data/compose/docker-compose-graph-node.yml
similarity index 80%
rename from app/data/compose/docker-compose-fixturenet-graph-node.yml
rename to app/data/compose/docker-compose-graph-node.yml
index eb47dc2b..e35ff494 100644
--- a/app/data/compose/docker-compose-fixturenet-graph-node.yml
+++ b/app/data/compose/docker-compose-graph-node.yml
@@ -6,8 +6,6 @@ services:
condition: service_healthy
ipfs:
condition: service_healthy
- lotus-node-1:
- condition: service_healthy
extra_hosts:
- host.docker.internal:host-gateway
environment:
@@ -17,9 +15,13 @@ services:
postgres_user: graph-node
postgres_pass: password
postgres_db: graph-node
- # TODO: Get endpoint from env
- ethereum: 'lotus-fixturenet:http://lotus-node-1:1234/rpc/v1'
- GRAPH_LOG: info
+ ethereum: ${ETH_NETWORKS:-lotus-fixturenet:http://lotus-node-1:1234/rpc/v1}
+ GRAPH_LOG: debug
+ ETHEREUM_REORG_THRESHOLD: 3
+ entrypoint: ["bash", "-c"]
+ # Wait for ETH RPC endpoint to be up when running with fixturenet-lotus
+ command: |
+ "wait_for ${ETH_RPC_HOST:-lotus-node-1}:${ETH_RPC_PORT:-1234} -t 1800 -- start"
ports:
- "8000"
- "8001"
diff --git a/app/data/compose/docker-compose-mainnet-eth-api-proxy.yml b/app/data/compose/docker-compose-mainnet-eth-api-proxy.yml
new file mode 100644
index 00000000..8e3b0845
--- /dev/null
+++ b/app/data/compose/docker-compose-mainnet-eth-api-proxy.yml
@@ -0,0 +1,15 @@
+version: '3.8'
+
+services:
+ redis:
+ hostname: mainnet-eth-api-proxy-redis
+ image: redis:7-alpine
+ command: redis-server --save "" --appendonly no --maxmemory-policy allkeys-lru --maxmemory 1G
+
+ eth-api-proxy:
+ hostname: mainnet-eth-api-proxy
+ image: cerc/eth-api-proxy:local
+ env_file:
+ - ../config/mainnet-eth-api-proxy/ethpxy.env
+ ports:
+ - 8547
diff --git a/app/data/compose/docker-compose-mainnet-eth-keycloak.yml b/app/data/compose/docker-compose-mainnet-eth-keycloak.yml
new file mode 100644
index 00000000..dfa9a804
--- /dev/null
+++ b/app/data/compose/docker-compose-mainnet-eth-keycloak.yml
@@ -0,0 +1,51 @@
+version: '3.8'
+
+services:
+ keycloak-db:
+ image: postgres:14-alpine
+ env_file:
+ - ../config/mainnet-eth-keycloak/keycloak.env
+ healthcheck:
+ test: ["CMD", "nc", "-v", "localhost", "5432"]
+ interval: 30s
+ timeout: 10s
+ retries: 10
+ start_period: 3s
+ volumes:
+ - mainnet_eth_keycloak_db:/var/lib/postgresql/data
+ ports:
+ - 5432
+
+ keycloak:
+ image: cerc/keycloak:local
+ env_file:
+ - ../config/mainnet-eth-keycloak/keycloak.env
+ environment:
+ JAVA_OPTS_APPEND: "-Dkeycloak.migration.action=import -Dkeycloak.migration.provider=dir -Dkeycloak.migration.dir=/import -Dkeycloak.migration.strategy=IGNORE_EXISTING"
+ volumes:
+ - ../config/mainnet-eth-keycloak/import:/import
+ ports:
+ - 8080
+ command: ["start"]
+ depends_on:
+ keycloak-db:
+ condition: service_healthy
+
+ keycloak-reg-ui:
+ image: cerc/keycloak-reg-ui:local
+ env_file:
+ - ../config/mainnet-eth-keycloak/keycloak.env
+ volumes:
+ - ../config/mainnet-eth-keycloak/ui:/config
+ ports:
+ - 80
+
+ keycloak-reg-api:
+ image: cerc/keycloak-reg-api:local
+ env_file:
+ - ../config/mainnet-eth-keycloak/keycloak.env
+ ports:
+ - 9292
+
+volumes:
+ mainnet_eth_keycloak_db:
diff --git a/app/data/compose/docker-compose-mainnet-eth-metrics.yml b/app/data/compose/docker-compose-mainnet-eth-metrics.yml
new file mode 100644
index 00000000..03c223e7
--- /dev/null
+++ b/app/data/compose/docker-compose-mainnet-eth-metrics.yml
@@ -0,0 +1,25 @@
+version: "3.2"
+services:
+ prometheus:
+ restart: always
+ image: prom/prometheus
+ depends_on:
+ mainnet-eth-geth-1:
+ condition: service_healthy
+ env_file:
+ - ../config/mainnet-eth-metrics/metrics.env
+ volumes:
+ - ../config/mainnet-eth-metrics/prometheus/etc:/etc/prometheus
+ ports:
+ - "9090"
+ grafana:
+ restart: always
+ image: grafana/grafana
+ env_file:
+ - ../config/mainnet-eth-metrics/metrics.env
+ volumes:
+ - ../config/mainnet-eth-metrics/grafana/etc/provisioning/dashboards:/etc/grafana/provisioning/dashboards
+ - ../config/mainnet-eth-metrics/grafana/etc/provisioning/datasources:/etc/grafana/provisioning/datasources
+ - ../config/mainnet-eth-metrics/grafana/etc/dashboards:/etc/grafana/dashboards
+ ports:
+ - "3000"
diff --git a/app/data/compose/docker-compose-mainnet-eth.yml b/app/data/compose/docker-compose-mainnet-eth.yml
index 7b44a88e..1a6fc529 100644
--- a/app/data/compose/docker-compose-mainnet-eth.yml
+++ b/app/data/compose/docker-compose-mainnet-eth.yml
@@ -6,16 +6,13 @@ services:
hostname: mainnet-eth-geth-1
cap_add:
- SYS_PTRACE
- environment:
- CERC_REMOTE_DEBUG: "true"
- CERC_RUN_STATEDIFF: ${CERC_RUN_STATEDIFF:-detect}
- CERC_STATEDIFF_DB_NODE_ID: 1
- CERC_SCRIPT_DEBUG: ${CERC_SCRIPT_DEBUG}
image: cerc/go-ethereum:local
entrypoint: /bin/sh
command: -c "/opt/run-geth.sh"
+ env_file:
+ - ../config/mainnet-eth/geth.env
volumes:
- - mainnet_eth_geth_1_data:/root/ethdata
+ - mainnet_eth_geth_1_data:/data
- mainnet_eth_config_data:/etc/mainnet-eth
- ../config/mainnet-eth/scripts/run-geth.sh:/opt/run-geth.sh
healthcheck:
@@ -25,30 +22,48 @@ services:
retries: 10
start_period: 3s
ports:
+ # http api
- "8545"
+ # ws api
+ - "8546"
+ # ws el
+ - "8551"
+ # p2p
+ - "30303"
+ - "30303/udp"
+ # debugging
- "40000"
+ # metrics
- "6060"
mainnet-eth-lighthouse-1:
restart: always
hostname: mainnet-eth-lighthouse-1
healthcheck:
- test: ["CMD", "wget", "--tries=1", "--connect-timeout=1", "--quiet", "-O", "-", "http://localhost:8001/eth/v2/beacon/blocks/head"]
+ test: ["CMD", "wget", "--tries=1", "--connect-timeout=1", "--quiet", "-O", "-", "http://localhost:5052/eth/v2/beacon/blocks/head"]
interval: 30s
timeout: 10s
retries: 10
start_period: 30s
environment:
- EXECUTION_ENDPOINT: "http://mainnet-eth-geth-1:8551"
+ LIGHTHOUSE_EXECUTION_ENDPOINT: "http://mainnet-eth-geth-1:8551"
+ env_file:
+ - ../config/mainnet-eth/lighthouse.env
image: cerc/lighthouse:local
entrypoint: /bin/sh
command: -c "/opt/run-lighthouse.sh"
volumes:
- - mainnet_eth_lighthouse_1_data:/var/lighthouse-data-dir
+ - mainnet_eth_lighthouse_1_data:/data
- mainnet_eth_config_data:/etc/mainnet-eth
- ../config/mainnet-eth/scripts/run-lighthouse.sh:/opt/run-lighthouse.sh
ports:
- - "8001"
+ # api
+ - "5052"
+ # metrics
+ - "5054"
+ # p2p
+ - "9000"
+ - "9000/udp"
volumes:
mainnet_eth_config_data:
diff --git a/app/data/compose/docker-compose-mainnet-laconicd.yml b/app/data/compose/docker-compose-mainnet-laconicd.yml
index 78d2cd2f..ff2f3376 100644
--- a/app/data/compose/docker-compose-mainnet-laconicd.yml
+++ b/app/data/compose/docker-compose-mainnet-laconicd.yml
@@ -2,20 +2,22 @@ services:
laconicd:
restart: no
image: cerc/laconicd:local
- command: ["/bin/sh", "-c", "while :; do sleep 600; done"]
+ command: ["/bin/sh", "-c", "/opt/run-laconicd.sh"]
volumes:
# The cosmos-sdk node's database directory:
- laconicd-data:/root/.laconicd/data
+ - laconicd-config:/root/.laconicd/config
+ - laconicd-keyring:/root/.laconicd/keyring-test
# TODO: look at folding these scripts into the container
- - ../config/mainnet-laconicd/create-fixturenet.sh:/docker-entrypoint-scripts.d/create-fixturenet.sh
- - ../config/mainnet-laconicd/export-mykey.sh:/docker-entrypoint-scripts.d/export-mykey.sh
- - ../config/mainnet-laconicd/export-myaddress.sh:/docker-entrypoint-scripts.d/export-myaddress.sh
+ - ../config/mainnet-laconicd/scripts/run-laconicd.sh:/opt/run-laconicd.sh
+ - ../config/mainnet-laconicd/scripts/export-mykey.sh:/docker-entrypoint-scripts.d/export-mykey.sh
+ - ../config/mainnet-laconicd/scripts/export-myaddress.sh:/docker-entrypoint-scripts.d/export-myaddress.sh
# TODO: determine which of the ports below is really needed
ports:
- "6060"
- "26657"
- "26656"
- - "9473:9473"
+ - "9473"
- "8545"
- "8546"
- "9090"
@@ -28,3 +30,5 @@ services:
volumes:
laconicd-data:
+ laconicd-config:
+ laconicd-keyring:
diff --git a/app/data/compose/docker-compose-mobymask-app-v3.yml b/app/data/compose/docker-compose-mobymask-app-v3.yml
new file mode 100644
index 00000000..244fb29b
--- /dev/null
+++ b/app/data/compose/docker-compose-mobymask-app-v3.yml
@@ -0,0 +1,45 @@
+version: '3.2'
+
+services:
+ # Builds and serves the MobyMask v3 react-app
+ mobymask-v3-app:
+ restart: unless-stopped
+ image: cerc/mobymask-ui:local
+ env_file:
+ - ../config/watcher-mobymask-v3/mobymask-params.env
+ environment:
+ CERC_SCRIPT_DEBUG: ${CERC_SCRIPT_DEBUG}
+ CERC_CHAIN_ID: ${CERC_CHAIN_ID}
+ CERC_DEPLOYED_CONTRACT: ${CERC_DEPLOYED_CONTRACT}
+ CERC_RELAY_NODES: ${CERC_RELAY_NODES}
+ CERC_DENY_MULTIADDRS: ${CERC_DENY_MULTIADDRS}
+ CERC_PUBSUB: ${CERC_PUBSUB}
+ CERC_GOSSIPSUB_DIRECT_PEERS: ${CERC_GOSSIPSUB_DIRECT_PEERS}
+ CERC_NA_ADDRESS: ${CERC_NA_ADDRESS}
+ CERC_VPA_ADDRESS: ${CERC_VPA_ADDRESS}
+ CERC_CA_ADDRESS: ${CERC_CA_ADDRESS}
+ CERC_APP_WATCHER_URL: ${CERC_APP_WATCHER_URL}
+ CERC_PAYMENT_NITRO_ADDRESS: ${CERC_PAYMENT_NITRO_ADDRESS:-0xBBB676f9cFF8D242e9eaC39D063848807d3D1D94}
+ CERC_SNAP_URL: ${CERC_SNAP_URL}
+ working_dir: /app
+ command: ["bash", "/scripts/mobymask-app-start.sh"]
+ volumes:
+ - ../config/watcher-mobymask-v3/mobymask-app-start.sh:/scripts/mobymask-app-start.sh
+ - peers_ids:/peers
+ - mobymask_deployment:/server
+ - nitro_deployment:/nitro
+ ports:
+ - "127.0.0.1:3004:80"
+ healthcheck:
+ test: ["CMD", "nc", "-vz", "localhost", "80"]
+ interval: 20s
+ timeout: 5s
+ retries: 15
+ start_period: 10s
+ extra_hosts:
+ - "host.docker.internal:host-gateway"
+
+volumes:
+ peers_ids:
+ mobymask_deployment:
+ nitro_deployment:
diff --git a/app/data/compose/docker-compose-mobymask-app.yml b/app/data/compose/docker-compose-mobymask-app.yml
index c50622c7..100342ae 100644
--- a/app/data/compose/docker-compose-mobymask-app.yml
+++ b/app/data/compose/docker-compose-mobymask-app.yml
@@ -14,7 +14,8 @@ services:
CERC_APP_WATCHER_URL: ${CERC_APP_WATCHER_URL}
CERC_RELAY_NODES: ${CERC_RELAY_NODES}
CERC_DENY_MULTIADDRS: ${CERC_DENY_MULTIADDRS}
- CERC_RELEASE: "v0.1.5"
+ CERC_PUBSUB: ${CERC_PUBSUB}
+ CERC_RELEASE: "v0.1.7"
CERC_USE_NPM: true
CERC_CONFIG_FILE: "src/config.json"
working_dir: /scripts
@@ -48,7 +49,8 @@ services:
CERC_APP_WATCHER_URL: ${CERC_APP_WATCHER_URL}
CERC_RELAY_NODES: ${CERC_RELAY_NODES}
CERC_DENY_MULTIADDRS: ${CERC_DENY_MULTIADDRS}
- CERC_RELEASE: "v0.1.5-lxdao-0.1.1"
+ CERC_PUBSUB: ${CERC_PUBSUB}
+ CERC_RELEASE: "v0.1.7-lxdao-0.1.1"
CERC_USE_NPM: false
CERC_CONFIG_FILE: "src/utils/config.json"
working_dir: /scripts
diff --git a/app/data/compose/docker-compose-mobymask-snap.yml b/app/data/compose/docker-compose-mobymask-snap.yml
new file mode 100644
index 00000000..d2cec254
--- /dev/null
+++ b/app/data/compose/docker-compose-mobymask-snap.yml
@@ -0,0 +1,14 @@
+version: '3.7'
+
+services:
+ mobymask-snap:
+ restart: unless-stopped
+ image: cerc/mobymask-snap:local
+ ports:
+ - "127.0.0.1:8080:8080"
+ healthcheck:
+ test: ["CMD", "nc", "-vz", "localhost", "8080"]
+ interval: 10s
+ timeout: 5s
+ retries: 10
+ start_period: 10s
diff --git a/app/data/compose/docker-compose-nitro-contracts.yml b/app/data/compose/docker-compose-nitro-contracts.yml
new file mode 100644
index 00000000..8357d25a
--- /dev/null
+++ b/app/data/compose/docker-compose-nitro-contracts.yml
@@ -0,0 +1,23 @@
+version: '3.7'
+
+services:
+ # Optionally deploys the Nitro contracts
+ nitro-contracts:
+ image: cerc/nitro-contracts:local
+ restart: on-failure
+ environment:
+ CERC_SCRIPT_DEBUG: ${CERC_SCRIPT_DEBUG}
+ CERC_ETH_RPC_ENDPOINT: ${CERC_ETH_RPC_ENDPOINT:-http://fixturenet-eth-geth-1:8545}
+ CERC_PRIVATE_KEY_DEPLOYER: ${CERC_PRIVATE_KEY_DEPLOYER:-0x888814df89c4358d7ddb3fa4b0213e7331239a80e1f013eaa7b2deca2a41a218}
+ CERC_NA_ADDRESS: ${CERC_NA_ADDRESS}
+ CERC_VPA_ADDRESS: ${CERC_VPA_ADDRESS}
+ CERC_CA_ADDRESS: ${CERC_CA_ADDRESS}
+ volumes:
+ - ../config/nitro-contracts/deploy.sh:/app/deploy.sh
+ - nitro_deployment:/app/deployment
+ command: ["bash", "-c", "/app/deploy.sh"]
+ extra_hosts:
+ - "host.docker.internal:host-gateway"
+
+volumes:
+ nitro_deployment:
diff --git a/app/data/compose/docker-compose-nitro-reverse-payment-proxy.yml b/app/data/compose/docker-compose-nitro-reverse-payment-proxy.yml
new file mode 100644
index 00000000..37f49407
--- /dev/null
+++ b/app/data/compose/docker-compose-nitro-reverse-payment-proxy.yml
@@ -0,0 +1,27 @@
+version: '3.7'
+
+services:
+ nitro-reverse-payment-proxy:
+ image: cerc/go-nitro:local
+ hostname: nitro-reverse-payment-proxy
+ restart: on-failure
+ depends_on:
+ # Wait for the go-nitro node to start
+ go-nitro:
+ condition: service_healthy
+ environment:
+ PROXY_ADDRESS: 0.0.0.0:8081
+ PROXY_NITRO_ENDPOINT: ${PROXY_NITRO_ENDPOINT:-go-nitro:4005/api/v1}
+ PROXY_DESTINATION_URL: ${PROXY_DESTINATION_URL:-http://ipld-eth-server:8081}
+ PROXY_COST_PER_BYTE: ${PROXY_COST_PER_BYTE:-1}
+ entrypoint: ["bash", "-c", "/app/run-reverse-payment-proxy.sh"]
+ volumes:
+ - ../config/go-nitro/run-reverse-payment-proxy.sh:/app/run-reverse-payment-proxy.sh
+ healthcheck:
+ test: ["CMD", "nc", "-vz", "localhost", "8081"]
+ interval: 10s
+ timeout: 5s
+ retries: 10
+ start_period: 10s
+ ports:
+ - "8081:8081"
diff --git a/app/data/compose/docker-compose-ponder.yml b/app/data/compose/docker-compose-ponder.yml
new file mode 100644
index 00000000..62d46b47
--- /dev/null
+++ b/app/data/compose/docker-compose-ponder.yml
@@ -0,0 +1,31 @@
+version: '3.7'
+
+services:
+ ponder-app:
+ restart: unless-stopped
+ image: cerc/ponder:local
+ working_dir: /app/examples/token-erc20
+ environment:
+ PONDER_CHAIN_ID: ${PONDER_CHAIN_ID:-99}
+ PONDER_RPC_URL_1: ${PONDER_RPC_URL_1:-http://nitro-reverse-payment-proxy:8081}
+ CERC_PONDER_NITRO_PK: ${CERC_PONDER_NITRO_PK:-58368d20ff12f17669c06158c21d885897aa56f9be430edc789614bf9851d53f}
+ CERC_PONDER_NITRO_CHAIN_PK: ${CERC_PONDER_NITRO_CHAIN_PK:-fb1e9af328c283ca3e2486e7c24d13582b7912057d8b9542ff41503c85bc05c0}
+ CERC_PONDER_NITRO_CHAIN_URL: ${CERC_PONDER_NITRO_CHAIN_URL:-http://fixturenet-eth-geth-1:8545}
+ CERC_RELAY_MULTIADDR: ${CERC_RELAY_MULTIADDR}
+ CERC_UPSTREAM_NITRO_ADDRESS: ${CERC_UPSTREAM_NITRO_ADDRESS:-0xAAA6628Ec44A8a742987EF3A114dDFE2D4F7aDCE}
+ CERC_UPSTREAM_NITRO_MULTIADDR: ${CERC_UPSTREAM_NITRO_MULTIADDR:-/dns4/go-nitro/tcp/5005/ws/p2p/16Uiu2HAmSjXJqsyBJgcBUU2HQmykxGseafSatbpq5471XmuaUqyv}
+ CERC_UPSTREAM_NITRO_PAY_AMOUNT: ${CERC_UPSTREAM_NITRO_PAY_AMOUNT:-5000}
+ command: ["bash", "./ponder-start.sh"]
+ volumes:
+ - ../config/ponder/ponder-start.sh:/app/examples/token-erc20/ponder-start.sh
+ - ../config/ponder/ponder.config.ts:/app/examples/token-erc20/ponder.config.ts
+ - peers_ids:/peers
+ - nitro_deployment:/nitro
+ - ponder_nitro_data:/app/examples/token-erc20/.ponder/nitro-db
+ extra_hosts:
+ - "host.docker.internal:host-gateway"
+
+volumes:
+ peers_ids:
+ nitro_deployment:
+ ponder_nitro_data:
diff --git a/app/data/compose/docker-compose-sushiswap-subgraph-v3.yml b/app/data/compose/docker-compose-sushiswap-subgraph-v3.yml
index b4a7b313..57635f27 100644
--- a/app/data/compose/docker-compose-sushiswap-subgraph-v3.yml
+++ b/app/data/compose/docker-compose-sushiswap-subgraph-v3.yml
@@ -10,17 +10,12 @@ services:
condition: service_healthy
environment:
- APP=v3
- - NETWORK=lotus-fixturenet
- command: ["bash", "-c", "./run-v3.sh"]
- working_dir: /app/subgraphs/v3
+ - NETWORK=filecoin
+ command: ["bash", "-c", "./blocks/run-blocks.sh && ./v3/run-v3.sh"]
+ working_dir: /app/subgraphs
volumes:
- - ../config/sushiswap-subgraph-v3/lotus-fixturenet.js.template:/app/config/lotus-fixturenet.js.template
+ - ../config/sushiswap-subgraph-v3/filecoin.js:/app/config/filecoin.js
+ - ../config/sushiswap-subgraph-v3/run-blocks.sh:/app/subgraphs/blocks/run-blocks.sh
- ../config/sushiswap-subgraph-v3/run-v3.sh:/app/subgraphs/v3/run-v3.sh
- - sushiswap_core_deployment:/app/subgraphs/v3/core-deployments/docker
- - sushiswap_periphery_deployment:/app/subgraphs/v3/deployments/docker
extra_hosts:
- "host.docker.internal:host-gateway"
-
-volumes:
- sushiswap_core_deployment:
- sushiswap_periphery_deployment:
diff --git a/app/data/compose/docker-compose-watcher-mobymask-v2.yml b/app/data/compose/docker-compose-watcher-mobymask-v2.yml
index bed274c6..176135fb 100644
--- a/app/data/compose/docker-compose-watcher-mobymask-v2.yml
+++ b/app/data/compose/docker-compose-watcher-mobymask-v2.yml
@@ -84,6 +84,7 @@ services:
CERC_PRIVATE_KEY_PEER: ${CERC_PRIVATE_KEY_PEER}
CERC_RELAY_PEERS: ${CERC_RELAY_PEERS}
CERC_DENY_MULTIADDRS: ${CERC_DENY_MULTIADDRS}
+ CERC_PUBSUB: ${CERC_PUBSUB}
CERC_RELAY_ANNOUNCE_DOMAIN: ${CERC_RELAY_ANNOUNCE_DOMAIN}
CERC_ENABLE_PEER_L2_TXS: ${CERC_ENABLE_PEER_L2_TXS}
CERC_DEPLOYED_CONTRACT: ${CERC_DEPLOYED_CONTRACT}
diff --git a/app/data/compose/docker-compose-watcher-mobymask-v3.yml b/app/data/compose/docker-compose-watcher-mobymask-v3.yml
new file mode 100644
index 00000000..107c9bba
--- /dev/null
+++ b/app/data/compose/docker-compose-watcher-mobymask-v3.yml
@@ -0,0 +1,122 @@
+version: '3.2'
+
+services:
+ # Starts the PostgreSQL database for watcher
+ mobymask-watcher-db:
+ restart: unless-stopped
+ image: postgres:14-alpine
+ environment:
+ - POSTGRES_USER=vdbm
+ - POSTGRES_MULTIPLE_DATABASES=mobymask-watcher,mobymask-watcher-job-queue
+ - POSTGRES_EXTENSION=mobymask-watcher-job-queue:pgcrypto
+ - POSTGRES_PASSWORD=password
+ volumes:
+ - ../config/postgresql/multiple-postgressql-databases.sh:/docker-entrypoint-initdb.d/multiple-postgressql-databases.sh
+ - mobymask_watcher_db_data:/var/lib/postgresql/data
+ ports:
+ - "127.0.0.1:15432:5432"
+ healthcheck:
+ test: ["CMD", "nc", "-v", "localhost", "5432"]
+ interval: 20s
+ timeout: 5s
+ retries: 15
+ start_period: 10s
+
+ # Deploys the MobyMask contract and generates an invite link
+ # Deployment is skipped if CERC_DEPLOYED_CONTRACT env is set
+ mobymask:
+ image: cerc/mobymask:local
+ working_dir: /app/packages/server
+ env_file:
+ - ../config/watcher-mobymask-v3/mobymask-params.env
+ environment:
+ CERC_SCRIPT_DEBUG: ${CERC_SCRIPT_DEBUG}
+ ENV: "PROD"
+ CERC_ETH_RPC_ENDPOINT: ${CERC_ETH_RPC_ENDPOINT}
+ CERC_PRIVATE_KEY_DEPLOYER: ${CERC_PRIVATE_KEY_DEPLOYER:-0x888814df89c4358d7ddb3fa4b0213e7331239a80e1f013eaa7b2deca2a41a218}
+ CERC_MOBYMASK_APP_BASE_URI: ${CERC_MOBYMASK_APP_BASE_URI}
+ CERC_DEPLOYED_CONTRACT: ${CERC_DEPLOYED_CONTRACT}
+ command: ["bash", "-c", "./deploy-and-generate-invite.sh"]
+ volumes:
+ - ../config/watcher-mobymask-v3/deploy-and-generate-invite.sh:/app/packages/server/deploy-and-generate-invite.sh
+ - mobymask_deployment:/app/packages/server
+ extra_hosts:
+ - "host.docker.internal:host-gateway"
+
+ # Creates peer-id files if they don't exist
+ peer-ids-gen:
+ image: cerc/watcher-ts:local
+ restart: on-failure
+ environment:
+ CERC_SCRIPT_DEBUG: ${CERC_SCRIPT_DEBUG}
+ working_dir: /app/packages/peer
+ command: ["sh", "generate-peer-ids.sh"]
+ volumes:
+ - ../config/watcher-mobymask-v2/generate-peer-ids.sh:/app/packages/peer/generate-peer-ids.sh
+ - peers_ids:/peer-ids
+
+ # Starts the MobyMask v3 watcher server
+ mobymask-watcher-server:
+ image: cerc/watcher-mobymask-v3:local
+ restart: unless-stopped
+ depends_on:
+ mobymask-watcher-db:
+ condition: service_healthy
+ peer-ids-gen:
+ condition: service_completed_successfully
+ mobymask:
+ condition: service_completed_successfully
+ nitro-contracts:
+ condition: service_completed_successfully
+ env_file:
+ - ../config/watcher-mobymask-v3/mobymask-params.env
+ environment:
+ CERC_SCRIPT_DEBUG: ${CERC_SCRIPT_DEBUG}
+ CERC_ETH_RPC_QUERY_ENDPOINT: ${CERC_ETH_RPC_QUERY_ENDPOINT}
+ CERC_ETH_RPC_MUTATION_ENDPOINT: ${CERC_ETH_RPC_MUTATION_ENDPOINT}
+ CERC_RELAY_PEERS: ${CERC_RELAY_PEERS}
+ CERC_DENY_MULTIADDRS: ${CERC_DENY_MULTIADDRS}
+ CERC_PUBSUB: ${CERC_PUBSUB}
+ CERC_RELAY_ANNOUNCE_DOMAIN: ${CERC_RELAY_ANNOUNCE_DOMAIN}
+ CERC_ENABLE_PEER_L2_TXS: ${CERC_ENABLE_PEER_L2_TXS}
+ CERC_DEPLOYED_CONTRACT: ${CERC_DEPLOYED_CONTRACT}
+ CERC_NA_ADDRESS: ${CERC_NA_ADDRESS}
+ CERC_VPA_ADDRESS: ${CERC_VPA_ADDRESS}
+ CERC_CA_ADDRESS: ${CERC_CA_ADDRESS}
+ CERC_PRIVATE_KEY_PEER: ${CERC_PRIVATE_KEY_PEER:-111b7500bdce494d6f4bcfe8c2a0dde2ef92f751d9070fac6475dbd6d8021b3f}
+ CERC_WATCHER_NITRO_PK: ${CERC_WATCHER_NITRO_PK:-0279651921cd800ac560c21ceea27aab0107b67daf436cdd25ce84cad30159b4}
+ CERC_PEER_ID: ${CERC_PEER_ID}
+ CERC_ENABLE_UPSTREAM_PAYMENTS: ${CERC_ENABLE_UPSTREAM_PAYMENTS}
+ CERC_UPSTREAM_NITRO_ADDRESS: ${CERC_UPSTREAM_NITRO_ADDRESS:-0xAAA6628Ec44A8a742987EF3A114dDFE2D4F7aDCE}
+ CERC_UPSTREAM_NITRO_MULTIADDR: ${CERC_UPSTREAM_NITRO_MULTIADDR:-/dns4/go-nitro/tcp/5005/ws/p2p/16Uiu2HAmSjXJqsyBJgcBUU2HQmykxGseafSatbpq5471XmuaUqyv}
+ CERC_UPSTREAM_NITRO_PAY_AMOUNT: ${CERC_UPSTREAM_NITRO_PAY_AMOUNT:-5000}
+ command: ["bash", "./start-server.sh"]
+ volumes:
+ - ../config/watcher-mobymask-v3/watcher-config-template.toml:/app/environments/watcher-config-template.toml
+ - ../config/watcher-mobymask-v3/watcher-config-rates.toml:/app/environments/rates.toml
+ - ../config/watcher-mobymask-v3/keys:/app/keys
+ - ../config/watcher-mobymask-v3/start-server.sh:/app/start-server.sh
+ - watcher_nitro_data:/app/out/nitro-db
+ - peers_ids:/app/peers
+ - nitro_deployment:/nitro
+ - mobymask_deployment:/server
+ # Expose GQL, metrics and relay node ports
+ ports:
+ - "127.0.0.1:3001:3001"
+ - "127.0.0.1:9001:9001"
+ - "127.0.0.1:9090:9090"
+ healthcheck:
+ test: ["CMD", "busybox", "nc", "localhost", "9090"]
+ interval: 20s
+ timeout: 5s
+ retries: 15
+ start_period: 5s
+ extra_hosts:
+ - "host.docker.internal:host-gateway"
+
+volumes:
+ mobymask_watcher_db_data:
+ peers_ids:
+ mobymask_deployment:
+ nitro_deployment:
+ watcher_nitro_data:
diff --git a/app/data/compose/docker-compose-watcher-mobymask.yml b/app/data/compose/docker-compose-watcher-mobymask.yml
index f54e1454..686fa313 100644
--- a/app/data/compose/docker-compose-watcher-mobymask.yml
+++ b/app/data/compose/docker-compose-watcher-mobymask.yml
@@ -24,15 +24,37 @@ services:
retries: 15
start_period: 10s
- mobymask-watcher-server:
+ mobymask-watcher-job-runner:
restart: unless-stopped
depends_on:
mobymask-watcher-db:
condition: service_healthy
image: cerc/watcher-mobymask:local
+ command: ["sh", "-c", "yarn job-runner"]
+ volumes:
+ - ../config/watcher-mobymask/mobymask-watcher.toml:/app/environments/local.toml
+ ports:
+ - "0.0.0.0:9000:9000"
+ extra_hosts:
+ - "ipld-eth-server:host-gateway"
+ healthcheck:
+ test: ["CMD", "nc", "-v", "localhost", "9000"]
+ interval: 20s
+ timeout: 5s
+ retries: 15
+ start_period: 5s
+
+ mobymask-watcher-server:
+ restart: unless-stopped
+ depends_on:
+ mobymask-watcher-db:
+ condition: service_healthy
+ mobymask-watcher-job-runner:
+ condition: service_healthy
+ image: cerc/watcher-mobymask:local
command: ["sh", "-c", "yarn server"]
volumes:
- - ../config/watcher-mobymask/mobymask-watcher.toml:/app/packages/mobymask-watcher/environments/local.toml
+ - ../config/watcher-mobymask/mobymask-watcher.toml:/app/environments/local.toml
ports:
- "0.0.0.0:3001:3001"
- "0.0.0.0:9001:9001"
@@ -45,21 +67,5 @@ services:
retries: 15
start_period: 5s
- mobymask-watcher-job-runner:
- restart: unless-stopped
- depends_on:
- mobymask-watcher-server:
- condition: service_healthy
- mobymask-watcher-db:
- condition: service_healthy
- image: cerc/watcher-mobymask:local
- command: ["sh", "-c", "yarn job-runner"]
- volumes:
- - ../config/watcher-mobymask/mobymask-watcher.toml:/app/packages/mobymask-watcher/environments/local.toml
- ports:
- - "0.0.0.0:9000:9000"
- extra_hosts:
- - "ipld-eth-server:host-gateway"
-
volumes:
mobymask_watcher_db_data:
diff --git a/app/data/config/fixturenet-lotus/setup-miner.sh b/app/data/config/fixturenet-lotus/setup-miner.sh
index 2e9efb0a..998f5bea 100644
--- a/app/data/config/fixturenet-lotus/setup-miner.sh
+++ b/app/data/config/fixturenet-lotus/setup-miner.sh
@@ -23,7 +23,6 @@ if [ ! -f /root/data/localnet.json ]; then
fi
# start daemon
-# /root/.lotus-shared/devgen.car path
nohup lotus daemon --lotus-make-genesis=/root/.lotus-shared/devgen.car --profile=bootstrapper --genesis-template=/root/data/localnet.json --bootstrap=false > /var/log/lotus.log 2>&1 &
# Loop until the daemon is started
@@ -33,9 +32,6 @@ while ! grep -q "started ChainNotify channel" /var/log/lotus.log ; do
done
echo "Daemon started."
-# copy genesis file to shared volume
-cp /devgen.car /root/.lotus-shared
-
# if miner not already initialized
if [ ! -d $LOTUS_MINER_PATH ]; then
# initialize miner
@@ -44,6 +40,7 @@ if [ ! -d $LOTUS_MINER_PATH ]; then
# fund a known account for usage
/fund-account.sh
+ echo "Initializing miner..."
lotus-miner init --genesis-miner --actor=t01000 --sector-size=2KiB --pre-sealed-sectors=/root/data/.genesis-sectors --pre-sealed-metadata=/root/data/.genesis-sectors/pre-seal-t01000.json --nosync
fi
diff --git a/app/data/config/sushiswap-subgraph-v3/lotus-fixturenet.js.template b/app/data/config/fixturenet-sushiswap-subgraph-v3/lotus-fixturenet.js.template
similarity index 66%
rename from app/data/config/sushiswap-subgraph-v3/lotus-fixturenet.js.template
rename to app/data/config/fixturenet-sushiswap-subgraph-v3/lotus-fixturenet.js.template
index aee2cf97..94bfb6d8 100644
--- a/app/data/config/sushiswap-subgraph-v3/lotus-fixturenet.js.template
+++ b/app/data/config/fixturenet-sushiswap-subgraph-v3/lotus-fixturenet.js.template
@@ -1,5 +1,9 @@
module.exports = {
network: 'lotus-fixturenet',
+ blocks: {
+ address: '0x0000000000000000000000000000000000000000',
+ startBlock: 0,
+ },
v3: {
factory: {
address: 'FACTORY_ADDRESS',
@@ -13,8 +17,8 @@ module.exports = {
whitelistedTokenAddresses: [
'NATIVE_ADDRESS',
],
- stableTokenAddresses: [
- ],
+ stableTokenAddresses: [],
+ nativePricePool: '0x0000000000000000000000000000000000000000',
minimumEthLocked: 1.5
}
}
diff --git a/app/data/config/fixturenet-sushiswap-subgraph-v3/run-blocks.sh b/app/data/config/fixturenet-sushiswap-subgraph-v3/run-blocks.sh
new file mode 100755
index 00000000..72af062a
--- /dev/null
+++ b/app/data/config/fixturenet-sushiswap-subgraph-v3/run-blocks.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+set -e
+
+echo "Building blocks subgraph and deploying to graph-node..."
+
+cd blocks
+
+pnpm run generate
+pnpm run build
+
+pnpm exec graph create --node http://graph-node:8020/ sushiswap/blocks
+pnpm exec graph deploy --node http://graph-node:8020/ --ipfs http://ipfs:5001 --version-label 0.1.0 sushiswap/blocks
+
+echo "Done"
diff --git a/app/data/config/fixturenet-sushiswap-subgraph-v3/run-v3.sh b/app/data/config/fixturenet-sushiswap-subgraph-v3/run-v3.sh
new file mode 100755
index 00000000..402fa889
--- /dev/null
+++ b/app/data/config/fixturenet-sushiswap-subgraph-v3/run-v3.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+
+set -e
+
+cd v3
+
+# Loop until the NFPM deployment is detected
+echo "Waiting for sushiswap-periphery deployments to occur"
+while [ ! -f ./periphery-deployments/docker/NonfungiblePositionManager.json ]; do
+ sleep 5
+done
+
+echo "Reading contract addresses and block numbers from deployments"
+FACTORY_ADDRESS=$(jq -r '.address' ./core-deployments/docker/UniswapV3Factory.json)
+FACTORY_BLOCK=$(jq -r '.receipt.blockNumber' ./core-deployments/docker/UniswapV3Factory.json)
+NATIVE_ADDRESS=$(jq -r '.address' ./periphery-deployments/docker/WFIL.json)
+NFPM_ADDRESS=$(jq -r '.address' ./periphery-deployments/docker/NonfungiblePositionManager.json)
+NFPM_BLOCK=$(jq -r '.receipt.blockNumber' ./periphery-deployments/docker/NonfungiblePositionManager.json)
+
+# Read the JavaScript file content from the mounted template
+file_content=$(</app/config/lotus-fixturenet.js.template)
+
+# Replace the placeholder tokens with the deployed contract addresses and block numbers
+file_content="${file_content//FACTORY_ADDRESS/$FACTORY_ADDRESS}"
+file_content="${file_content//FACTORY_BLOCK/$FACTORY_BLOCK}"
+file_content="${file_content//NATIVE_ADDRESS/$NATIVE_ADDRESS}"
+file_content="${file_content//NFPM_ADDRESS/$NFPM_ADDRESS}"
+file_content="${file_content//NFPM_BLOCK/$NFPM_BLOCK}"
+
+# Write the resulting network config for the subgraph build
+echo "$file_content" > /app/config/lotus-fixturenet.js
+
+echo "Building v3 subgraph and deploying to graph-node..."
+
+pnpm run generate
+pnpm run build
+
+pnpm exec graph create --node http://graph-node:8020/ sushiswap/v3-lotus
+pnpm exec graph deploy --node http://graph-node:8020/ --ipfs http://ipfs:5001 --version-label 0.1.0 sushiswap/v3-lotus
+
+echo "Done"
diff --git a/app/data/config/go-nitro/run-nitro-node.sh b/app/data/config/go-nitro/run-nitro-node.sh
new file mode 100755
index 00000000..3131a826
--- /dev/null
+++ b/app/data/config/go-nitro/run-nitro-node.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+set -e
+if [ -n "$CERC_SCRIPT_DEBUG" ]; then
+ set -x
+fi
+
+nitro_addresses_file="/app/deployment/nitro-addresses.json"
+
+# Check if CERC_NA_ADDRESS environment variable is set
+if [ -n "$CERC_NA_ADDRESS" ]; then
+ echo "CERC_NA_ADDRESS is set to '$CERC_NA_ADDRESS'"
+ echo "CERC_VPA_ADDRESS is set to '$CERC_VPA_ADDRESS'"
+ echo "CERC_CA_ADDRESS is set to '$CERC_CA_ADDRESS'"
+ echo "Using the above Nitro addresses"
+
+ NA_ADDRESS=${CERC_NA_ADDRESS}
+ VPA_ADDRESS=${CERC_VPA_ADDRESS}
+ CA_ADDRESS=${CERC_CA_ADDRESS}
+elif [ -f ${nitro_addresses_file} ]; then
+ echo "Reading Nitro addresses from ${nitro_addresses_file}"
+
+ NA_ADDRESS=$(jq -r '.nitroAdjudicatorAddress' ${nitro_addresses_file})
+ VPA_ADDRESS=$(jq -r '.virtualPaymentAppAddress' ${nitro_addresses_file})
+ CA_ADDRESS=$(jq -r '.consensusAppAddress' ${nitro_addresses_file})
+else
+ echo "File ${nitro_addresses_file} not found"
+ exit 1
+fi
+
+echo "Running Nitro node"
+
+# TODO Wait for RPC endpoint to come up
+
+./nitro -chainurl ${NITRO_CHAIN_URL} -msgport 3005 -rpcport 4005 -wsmsgport 5005 -pk ${NITRO_PK} -chainpk ${NITRO_CHAIN_PK} -naaddress ${NA_ADDRESS} -vpaaddress ${VPA_ADDRESS} -caaddress ${CA_ADDRESS} -usedurablestore ${NITRO_USE_DURABLE_STORE} -durablestorefolder ${NITRO_DURABLE_STORE_FOLDER}
diff --git a/app/data/config/go-nitro/run-reverse-payment-proxy.sh b/app/data/config/go-nitro/run-reverse-payment-proxy.sh
new file mode 100755
index 00000000..f884f20a
--- /dev/null
+++ b/app/data/config/go-nitro/run-reverse-payment-proxy.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+set -e
+if [ -n "$CERC_SCRIPT_DEBUG" ]; then
+ set -x
+fi
+
+echo "Running Nitro reverse payment proxy"
+echo "Using PROXY_ADDRESS ${PROXY_ADDRESS}"
+echo "Using PROXY_NITRO_ENDPOINT ${PROXY_NITRO_ENDPOINT}"
+echo "Using PROXY_DESTINATION_URL ${PROXY_DESTINATION_URL}"
+echo "Using PROXY_COST_PER_BYTE ${PROXY_COST_PER_BYTE}"
+
+./start-reverse-payment-proxy -proxyaddress ${PROXY_ADDRESS} -nitroendpoint=${PROXY_NITRO_ENDPOINT} -destinationurl=${PROXY_DESTINATION_URL} -costperbyte ${PROXY_COST_PER_BYTE} -enablepaidrpcmethods
diff --git a/app/data/config/mainnet-eth-api-proxy/ethpxy.env b/app/data/config/mainnet-eth-api-proxy/ethpxy.env
new file mode 100644
index 00000000..aeed33d1
--- /dev/null
+++ b/app/data/config/mainnet-eth-api-proxy/ethpxy.env
@@ -0,0 +1,6 @@
+CERC_ETHPXY_LISTEN_PORT=8547
+CERC_ETHPXY_LISTEN_ADDR='0.0.0.0'
+CERC_ETHPXY_REDIS_URL='redis://mainnet-eth-api-proxy-redis:6379'
+CERC_ETHPXY_REDIS_MAX_AGE=120000
+CERC_ETHPXY_GETH_WS_URL='ws://mainnet-eth-geth-1:8546'
+CERC_ETHPXY_GETH_HTTP_URL='http://mainnet-eth-geth-1:8545'
diff --git a/app/data/config/mainnet-eth-keycloak/import/cerc-realm.json b/app/data/config/mainnet-eth-keycloak/import/cerc-realm.json
new file mode 100644
index 00000000..b6b6b606
--- /dev/null
+++ b/app/data/config/mainnet-eth-keycloak/import/cerc-realm.json
@@ -0,0 +1,2391 @@
+{
+ "id": "cerc",
+ "realm": "cerc",
+ "notBefore": 0,
+ "defaultSignatureAlgorithm": "RS256",
+ "revokeRefreshToken": false,
+ "refreshTokenMaxReuse": 0,
+ "accessTokenLifespan": 300,
+ "accessTokenLifespanForImplicitFlow": 900,
+ "ssoSessionIdleTimeout": 1800,
+ "ssoSessionMaxLifespan": 36000,
+ "ssoSessionIdleTimeoutRememberMe": 0,
+ "ssoSessionMaxLifespanRememberMe": 0,
+ "offlineSessionIdleTimeout": 2592000,
+ "offlineSessionMaxLifespanEnabled": false,
+ "offlineSessionMaxLifespan": 5184000,
+ "clientSessionIdleTimeout": 0,
+ "clientSessionMaxLifespan": 0,
+ "clientOfflineSessionIdleTimeout": 0,
+ "clientOfflineSessionMaxLifespan": 0,
+ "accessCodeLifespan": 60,
+ "accessCodeLifespanUserAction": 300,
+ "accessCodeLifespanLogin": 1800,
+ "actionTokenGeneratedByAdminLifespan": 43200,
+ "actionTokenGeneratedByUserLifespan": 300,
+ "oauth2DeviceCodeLifespan": 600,
+ "oauth2DevicePollingInterval": 5,
+ "enabled": true,
+ "sslRequired": "external",
+ "registrationAllowed": false,
+ "registrationEmailAsUsername": false,
+ "rememberMe": false,
+ "verifyEmail": false,
+ "loginWithEmailAllowed": true,
+ "duplicateEmailsAllowed": false,
+ "resetPasswordAllowed": false,
+ "editUsernameAllowed": false,
+ "bruteForceProtected": false,
+ "permanentLockout": false,
+ "maxFailureWaitSeconds": 900,
+ "minimumQuickLoginWaitSeconds": 60,
+ "waitIncrementSeconds": 60,
+ "quickLoginCheckMilliSeconds": 1000,
+ "maxDeltaTimeSeconds": 43200,
+ "failureFactor": 30,
+ "roles": {
+ "realm": [
+ {
+ "id": "0d341d8a-1f5a-4aa2-8152-1e2a9d3775bd",
+ "name": "uma_authorization",
+ "description": "${role_uma_authorization}",
+ "composite": false,
+ "clientRole": false,
+ "containerId": "cerc",
+ "attributes": {}
+ },
+ {
+ "id": "7da1172a-c7d2-463d-8fb7-466a04803cc8",
+ "name": "offline_access",
+ "description": "${role_offline-access}",
+ "composite": false,
+ "clientRole": false,
+ "containerId": "cerc",
+ "attributes": {}
+ },
+ {
+ "id": "211646ea-04a3-467e-9f25-f7539a405d03",
+ "name": "default-roles-cerc",
+ "description": "${role_default-roles}",
+ "composite": true,
+ "composites": {
+ "realm": [
+ "offline_access",
+ "uma_authorization"
+ ]
+ },
+ "clientRole": false,
+ "containerId": "cerc",
+ "attributes": {}
+ }
+ ],
+ "client": {
+ "realm-management": [
+ {
+ "id": "caa5575f-aa68-4cd4-bf23-d4718aaf7a74",
+ "name": "view-identity-providers",
+ "description": "${role_view-identity-providers}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "b0f59506-14be-4802-85bb-91e48e10795d",
+ "name": "create-client",
+ "description": "${role_create-client}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "d1ffefd0-e63c-4473-9334-1da2023a2379",
+ "name": "query-users",
+ "description": "${role_query-users}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "9b251fe8-a743-4be4-943c-5c8fc8efb59c",
+ "name": "impersonation",
+ "description": "${role_impersonation}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "305f3c42-4385-49fa-90b0-bd35f3a6593f",
+ "name": "manage-users",
+ "description": "${role_manage-users}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "c0745551-9565-4748-92b6-7540f8e4a4c8",
+ "name": "view-authorization",
+ "description": "${role_view-authorization}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "d333ddcd-6377-48e6-bcad-83248ce42820",
+ "name": "manage-authorization",
+ "description": "${role_manage-authorization}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "3bbac383-be19-4d98-9fb0-b8ba17f73765",
+ "name": "view-realm",
+ "description": "${role_view-realm}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "547e5883-a235-49e0-bbc1-b4b089e3d4c5",
+ "name": "realm-admin",
+ "description": "${role_realm-admin}",
+ "composite": true,
+ "composites": {
+ "client": {
+ "realm-management": [
+ "view-identity-providers",
+ "create-client",
+ "query-users",
+ "manage-users",
+ "impersonation",
+ "view-authorization",
+ "manage-authorization",
+ "view-realm",
+ "manage-events",
+ "query-realms",
+ "query-clients",
+ "manage-clients",
+ "view-events",
+ "view-clients",
+ "view-users",
+ "manage-realm",
+ "manage-identity-providers",
+ "query-groups"
+ ]
+ }
+ },
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "17e70842-fbc1-4c51-b79d-6ebac50c60e7",
+ "name": "manage-events",
+ "description": "${role_manage-events}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "bcf1a6f8-600b-4f27-a51a-8152f80da8a9",
+ "name": "query-realms",
+ "description": "${role_query-realms}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "bd85653b-3664-4d38-ac53-a662464bd9be",
+ "name": "query-clients",
+ "description": "${role_query-clients}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "5dbed8a7-1936-4df4-86e5-880c368b172f",
+ "name": "manage-clients",
+ "description": "${role_manage-clients}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "c4a6bd05-d72b-4206-a831-318530aa8d84",
+ "name": "view-events",
+ "description": "${role_view-events}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "19b36fad-1537-4b8e-9b1a-c5f3ef2830bf",
+ "name": "view-clients",
+ "description": "${role_view-clients}",
+ "composite": true,
+ "composites": {
+ "client": {
+ "realm-management": [
+ "query-clients"
+ ]
+ }
+ },
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "38e7b8be-e2de-4b88-a2b3-54fa3a6bb26e",
+ "name": "manage-realm",
+ "description": "${role_manage-realm}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "11f6d5d4-d883-493b-ad1d-9818d7fd6248",
+ "name": "view-users",
+ "description": "${role_view-users}",
+ "composite": true,
+ "composites": {
+ "client": {
+ "realm-management": [
+ "query-users",
+ "query-groups"
+ ]
+ }
+ },
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "93020a9b-cb4d-484a-9f65-a0a663d42fb8",
+ "name": "manage-identity-providers",
+ "description": "${role_manage-identity-providers}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ },
+ {
+ "id": "81cec017-13ec-473c-960b-1c84db230fc2",
+ "name": "query-groups",
+ "description": "${role_query-groups}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "attributes": {}
+ }
+ ],
+ "security-admin-console": [],
+ "dashboard-client": [],
+ "admin-cli": [],
+ "account-console": [],
+ "broker": [],
+ "account": [
+ {
+ "id": "df36afa2-d09f-4235-9b80-97790f444bb3",
+ "name": "manage-account",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1ff40495-e44c-4cbc-886a-87c3ca1edc9d",
+ "attributes": {}
+ },
+ {
+ "id": "eaaf957e-c77a-4d89-9408-ef15e31e3500",
+ "name": "delete-account",
+ "description": "${role_delete-account}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1ff40495-e44c-4cbc-886a-87c3ca1edc9d",
+ "attributes": {}
+ },
+ {
+ "id": "ba9ee05e-c4bd-44fe-b127-ba2b6a3b8cd5",
+ "name": "view-groups",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "1ff40495-e44c-4cbc-886a-87c3ca1edc9d",
+ "attributes": {}
+ }
+ ]
+ }
+ },
+ "groups": [
+ {
+ "id": "28f8feac-7483-4c9d-9a27-81e1564e461e",
+ "name": "allaccess",
+ "path": "/allaccess",
+ "attributes": {},
+ "realmRoles": [],
+ "clientRoles": {},
+ "subGroups": []
+ },
+ {
+ "id": "d2a0736e-e3fc-4c23-9ebd-c56b1fd44939",
+ "name": "eth",
+ "path": "/eth",
+ "attributes": {},
+ "realmRoles": [],
+ "clientRoles": {},
+ "subGroups": []
+ }
+ ],
+ "defaultRole": {
+ "id": "211646ea-04a3-467e-9f25-f7539a405d03",
+ "name": "default-roles-cerc",
+ "description": "${role_default-roles}",
+ "composite": true,
+ "clientRole": false,
+ "containerId": "cerc"
+ },
+ "requiredCredentials": [
+ "password"
+ ],
+ "otpPolicyType": "totp",
+ "otpPolicyAlgorithm": "HmacSHA1",
+ "otpPolicyInitialCounter": 0,
+ "otpPolicyDigits": 6,
+ "otpPolicyLookAheadWindow": 1,
+ "otpPolicyPeriod": 30,
+ "otpPolicyCodeReusable": false,
+ "otpSupportedApplications": [
+ "totpAppFreeOTPName",
+ "totpAppGoogleName"
+ ],
+ "webAuthnPolicyRpEntityName": "keycloak",
+ "webAuthnPolicySignatureAlgorithms": [
+ "ES256"
+ ],
+ "webAuthnPolicyRpId": "",
+ "webAuthnPolicyAttestationConveyancePreference": "not specified",
+ "webAuthnPolicyAuthenticatorAttachment": "not specified",
+ "webAuthnPolicyRequireResidentKey": "not specified",
+ "webAuthnPolicyUserVerificationRequirement": "not specified",
+ "webAuthnPolicyCreateTimeout": 0,
+ "webAuthnPolicyAvoidSameAuthenticatorRegister": false,
+ "webAuthnPolicyAcceptableAaguids": [],
+ "webAuthnPolicyPasswordlessRpEntityName": "keycloak",
+ "webAuthnPolicyPasswordlessSignatureAlgorithms": [
+ "ES256"
+ ],
+ "webAuthnPolicyPasswordlessRpId": "",
+ "webAuthnPolicyPasswordlessAttestationConveyancePreference": "not specified",
+ "webAuthnPolicyPasswordlessAuthenticatorAttachment": "not specified",
+ "webAuthnPolicyPasswordlessRequireResidentKey": "not specified",
+ "webAuthnPolicyPasswordlessUserVerificationRequirement": "not specified",
+ "webAuthnPolicyPasswordlessCreateTimeout": 0,
+ "webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister": false,
+ "webAuthnPolicyPasswordlessAcceptableAaguids": [],
+ "users": [
+ {
+ "id": "70af487b-c6d8-4f51-84d2-a23e8c9df7a3",
+ "createdTimestamp": 1670910521308,
+ "username": "service-account-dashboard-client",
+ "enabled": true,
+ "totp": false,
+ "emailVerified": false,
+ "serviceAccountClientId": "dashboard-client",
+ "disableableCredentialTypes": [],
+ "requiredActions": [],
+ "notBefore": 0,
+ "groups": []
+ }
+ ],
+ "scopeMappings": [
+ {
+ "clientScope": "offline_access",
+ "roles": [
+ "offline_access"
+ ]
+ }
+ ],
+ "clientScopeMappings": {
+ "account": [
+ {
+ "client": "account-console",
+ "roles": [
+ "manage-account",
+ "view-groups"
+ ]
+ }
+ ]
+ },
+ "clients": [
+ {
+ "id": "1ff40495-e44c-4cbc-886a-87c3ca1edc9d",
+ "clientId": "account",
+ "name": "${client_account}",
+ "rootUrl": "${authBaseUrl}",
+ "baseUrl": "/realms/cerc/account/",
+ "surrogateAuthRequired": false,
+ "enabled": true,
+ "alwaysDisplayInConsole": false,
+ "clientAuthenticatorType": "client-secret",
+ "secret": "**********",
+ "redirectUris": [
+ "/realms/cerc/account/*"
+ ],
+ "webOrigins": [],
+ "notBefore": 0,
+ "bearerOnly": false,
+ "consentRequired": false,
+ "standardFlowEnabled": true,
+ "implicitFlowEnabled": false,
+ "directAccessGrantsEnabled": false,
+ "serviceAccountsEnabled": false,
+ "publicClient": false,
+ "frontchannelLogout": false,
+ "protocol": "openid-connect",
+ "attributes": {
+ "post.logout.redirect.uris": "+"
+ },
+ "authenticationFlowBindingOverrides": {},
+ "fullScopeAllowed": false,
+ "nodeReRegistrationTimeout": 0,
+ "defaultClientScopes": [
+ "web-origins",
+ "profile",
+ "roles",
+ "email"
+ ],
+ "optionalClientScopes": [
+ "address",
+ "phone",
+ "offline_access",
+ "microprofile-jwt"
+ ]
+ },
+ {
+ "id": "75b3bc74-dd4d-4d0a-940c-f1a809c004a6",
+ "clientId": "account-console",
+ "name": "${client_account-console}",
+ "rootUrl": "${authBaseUrl}",
+ "baseUrl": "/realms/cerc/account/",
+ "surrogateAuthRequired": false,
+ "enabled": true,
+ "alwaysDisplayInConsole": false,
+ "clientAuthenticatorType": "client-secret",
+ "redirectUris": [
+ "/realms/cerc/account/*"
+ ],
+ "webOrigins": [],
+ "notBefore": 0,
+ "bearerOnly": false,
+ "consentRequired": false,
+ "standardFlowEnabled": true,
+ "implicitFlowEnabled": false,
+ "directAccessGrantsEnabled": false,
+ "serviceAccountsEnabled": false,
+ "publicClient": true,
+ "frontchannelLogout": false,
+ "protocol": "openid-connect",
+ "attributes": {
+ "post.logout.redirect.uris": "+",
+ "pkce.code.challenge.method": "S256"
+ },
+ "authenticationFlowBindingOverrides": {},
+ "fullScopeAllowed": false,
+ "nodeReRegistrationTimeout": 0,
+ "protocolMappers": [
+ {
+ "id": "4ec0bc59-9111-46da-a7d3-549b7aa0e398",
+ "name": "audience resolve",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-audience-resolve-mapper",
+ "consentRequired": false,
+ "config": {}
+ }
+ ],
+ "defaultClientScopes": [
+ "web-origins",
+ "profile",
+ "roles",
+ "email"
+ ],
+ "optionalClientScopes": [
+ "address",
+ "phone",
+ "offline_access",
+ "microprofile-jwt"
+ ]
+ },
+ {
+ "id": "0dc11c0a-b159-4e48-bdf9-31a1fccd25c6",
+ "clientId": "admin-cli",
+ "name": "${client_admin-cli}",
+ "surrogateAuthRequired": false,
+ "enabled": true,
+ "alwaysDisplayInConsole": false,
+ "clientAuthenticatorType": "client-secret",
+ "secret": "**********",
+ "redirectUris": [],
+ "webOrigins": [],
+ "notBefore": 0,
+ "bearerOnly": false,
+ "consentRequired": false,
+ "standardFlowEnabled": false,
+ "implicitFlowEnabled": false,
+ "directAccessGrantsEnabled": true,
+ "serviceAccountsEnabled": false,
+ "publicClient": true,
+ "frontchannelLogout": false,
+ "protocol": "openid-connect",
+ "attributes": {
+ "post.logout.redirect.uris": "+"
+ },
+ "authenticationFlowBindingOverrides": {},
+ "fullScopeAllowed": false,
+ "nodeReRegistrationTimeout": 0,
+ "defaultClientScopes": [
+ "web-origins",
+ "profile",
+ "roles",
+ "email"
+ ],
+ "optionalClientScopes": [
+ "address",
+ "phone",
+ "offline_access",
+ "microprofile-jwt"
+ ]
+ },
+ {
+ "id": "c8a751e8-08be-427f-9191-3bdc0cc3e829",
+ "clientId": "broker",
+ "name": "${client_broker}",
+ "surrogateAuthRequired": false,
+ "enabled": true,
+ "alwaysDisplayInConsole": false,
+ "clientAuthenticatorType": "client-secret",
+ "secret": "**********",
+ "redirectUris": [],
+ "webOrigins": [],
+ "notBefore": 0,
+ "bearerOnly": false,
+ "consentRequired": false,
+ "standardFlowEnabled": true,
+ "implicitFlowEnabled": false,
+ "directAccessGrantsEnabled": false,
+ "serviceAccountsEnabled": false,
+ "publicClient": false,
+ "frontchannelLogout": false,
+ "protocol": "openid-connect",
+ "attributes": {
+ "post.logout.redirect.uris": "+"
+ },
+ "authenticationFlowBindingOverrides": {},
+ "fullScopeAllowed": false,
+ "nodeReRegistrationTimeout": 0,
+ "defaultClientScopes": [
+ "web-origins",
+ "profile",
+ "roles",
+ "email"
+ ],
+ "optionalClientScopes": [
+ "address",
+ "phone",
+ "offline_access",
+ "microprofile-jwt"
+ ]
+ },
+ {
+ "id": "2d7384c7-9301-4a57-8fb5-b42aa43b8d3f",
+ "clientId": "dashboard-client",
+ "surrogateAuthRequired": false,
+ "enabled": true,
+ "alwaysDisplayInConsole": false,
+ "clientAuthenticatorType": "client-secret",
+ "secret": "**********",
+ "redirectUris": [
+ "http://localhost:8180/*"
+ ],
+ "webOrigins": [],
+ "notBefore": 0,
+ "bearerOnly": false,
+ "consentRequired": false,
+ "standardFlowEnabled": true,
+ "implicitFlowEnabled": false,
+ "directAccessGrantsEnabled": true,
+ "serviceAccountsEnabled": true,
+ "authorizationServicesEnabled": true,
+ "publicClient": false,
+ "frontchannelLogout": false,
+ "protocol": "openid-connect",
+ "attributes": {
+ "saml.assertion.signature": "false",
+ "saml.force.post.binding": "false",
+ "saml.multivalued.roles": "false",
+ "saml.encrypt": "false",
+ "post.logout.redirect.uris": "+",
+ "saml.server.signature": "false",
+ "saml.server.signature.keyinfo.ext": "false",
+ "exclude.session.state.from.auth.response": "false",
+ "saml_force_name_id_format": "false",
+ "saml.client.signature": "false",
+ "tls.client.certificate.bound.access.tokens": "false",
+ "saml.authnstatement": "false",
+ "display.on.consent.screen": "false",
+ "saml.onetimeuse.condition": "false"
+ },
+ "authenticationFlowBindingOverrides": {},
+ "fullScopeAllowed": true,
+ "nodeReRegistrationTimeout": -1,
+ "protocolMappers": [
+ {
+ "id": "5746e878-a248-4170-9f6e-221dad215e25",
+ "name": "Client ID",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usersessionmodel-note-mapper",
+ "consentRequired": false,
+ "config": {
+ "user.session.note": "clientId",
+ "userinfo.token.claim": "true",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "clientId",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "e584082b-a232-45bd-8520-bc88908642a1",
+ "name": "Client IP Address",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usersessionmodel-note-mapper",
+ "consentRequired": false,
+ "config": {
+ "user.session.note": "clientAddress",
+ "userinfo.token.claim": "true",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "clientAddress",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "bd9eaacb-6c5b-4bf1-bc0d-2457f7f7a767",
+ "name": "api-key",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-attribute-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "api-key",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "api-key",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "a10834b6-005a-4083-84e7-69ea2c08c0a8",
+ "name": "Client Host",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usersessionmodel-note-mapper",
+ "consentRequired": false,
+ "config": {
+ "user.session.note": "clientHost",
+ "userinfo.token.claim": "true",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "clientHost",
+ "jsonType.label": "String"
+ }
+ }
+ ],
+ "defaultClientScopes": [
+ "web-origins",
+ "profile",
+ "roles",
+ "email"
+ ],
+ "optionalClientScopes": [
+ "address",
+ "phone",
+ "offline_access",
+ "microprofile-jwt"
+ ],
+ "authorizationSettings": {
+ "allowRemoteResourceManagement": true,
+ "policyEnforcementMode": "ENFORCING",
+ "resources": [
+ {
+ "name": "Default Resource",
+ "type": "urn:dashboard-client:resources:default",
+ "ownerManagedAccess": false,
+ "attributes": {},
+ "_id": "fd85dada-073c-4da0-ac3c-73a823e86e70",
+ "uris": [
+ "/*"
+ ]
+ }
+ ],
+ "policies": [],
+ "scopes": [],
+ "decisionStrategy": "UNANIMOUS"
+ }
+ },
+ {
+ "id": "1a91181f-823b-4cbf-9d7a-f5f097a00d73",
+ "clientId": "realm-management",
+ "name": "${client_realm-management}",
+ "surrogateAuthRequired": false,
+ "enabled": true,
+ "alwaysDisplayInConsole": false,
+ "clientAuthenticatorType": "client-secret",
+ "secret": "**********",
+ "redirectUris": [],
+ "webOrigins": [],
+ "notBefore": 0,
+ "bearerOnly": true,
+ "consentRequired": false,
+ "standardFlowEnabled": true,
+ "implicitFlowEnabled": false,
+ "directAccessGrantsEnabled": false,
+ "serviceAccountsEnabled": false,
+ "publicClient": false,
+ "frontchannelLogout": false,
+ "protocol": "openid-connect",
+ "attributes": {
+ "post.logout.redirect.uris": "+"
+ },
+ "authenticationFlowBindingOverrides": {},
+ "fullScopeAllowed": false,
+ "nodeReRegistrationTimeout": 0,
+ "defaultClientScopes": [
+ "web-origins",
+ "profile",
+ "roles",
+ "email"
+ ],
+ "optionalClientScopes": [
+ "address",
+ "phone",
+ "offline_access",
+ "microprofile-jwt"
+ ]
+ },
+ {
+ "id": "1c10f8e8-6553-4d39-a705-8380214a01c9",
+ "clientId": "security-admin-console",
+ "name": "${client_security-admin-console}",
+ "rootUrl": "${authAdminUrl}",
+ "baseUrl": "/admin/cerc/console/",
+ "surrogateAuthRequired": false,
+ "enabled": true,
+ "alwaysDisplayInConsole": false,
+ "clientAuthenticatorType": "client-secret",
+ "secret": "**********",
+ "redirectUris": [
+ "/admin/cerc/console/*"
+ ],
+ "webOrigins": [
+ "+"
+ ],
+ "notBefore": 0,
+ "bearerOnly": false,
+ "consentRequired": false,
+ "standardFlowEnabled": true,
+ "implicitFlowEnabled": false,
+ "directAccessGrantsEnabled": false,
+ "serviceAccountsEnabled": false,
+ "publicClient": true,
+ "frontchannelLogout": false,
+ "protocol": "openid-connect",
+ "attributes": {
+ "post.logout.redirect.uris": "+",
+ "pkce.code.challenge.method": "S256"
+ },
+ "authenticationFlowBindingOverrides": {},
+ "fullScopeAllowed": false,
+ "nodeReRegistrationTimeout": 0,
+ "protocolMappers": [
+ {
+ "id": "e65eaf73-6a5d-44da-a129-930481351e5e",
+ "name": "locale",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-attribute-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "locale",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "locale",
+ "jsonType.label": "String"
+ }
+ }
+ ],
+ "defaultClientScopes": [
+ "web-origins",
+ "profile",
+ "roles",
+ "email"
+ ],
+ "optionalClientScopes": [
+ "address",
+ "phone",
+ "offline_access",
+ "microprofile-jwt"
+ ]
+ }
+ ],
+ "clientScopes": [
+ {
+ "id": "6e3c0398-187d-4515-9fad-e09225e6484c",
+ "name": "email",
+ "description": "OpenID Connect built-in scope: email",
+ "protocol": "openid-connect",
+ "attributes": {
+ "include.in.token.scope": "true",
+ "display.on.consent.screen": "true",
+ "consent.screen.text": "${emailScopeConsentText}"
+ },
+ "protocolMappers": [
+ {
+ "id": "7e81f77f-8631-46a0-979a-7744ea451880",
+ "name": "email",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-property-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "email",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "email",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "b41d73c7-5ae4-4492-9f05-fe737bbd8a9b",
+ "name": "email verified",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-property-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "emailVerified",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "email_verified",
+ "jsonType.label": "boolean"
+ }
+ }
+ ]
+ },
+ {
+ "id": "42c276ef-e93e-4e65-a963-b84a7b229449",
+ "name": "microprofile-jwt",
+ "description": "Microprofile - JWT built-in scope",
+ "protocol": "openid-connect",
+ "attributes": {
+ "include.in.token.scope": "true",
+ "display.on.consent.screen": "false"
+ },
+ "protocolMappers": [
+ {
+ "id": "145a68c2-075a-417c-bafb-824c0bb02dd2",
+ "name": "groups",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-realm-role-mapper",
+ "consentRequired": false,
+ "config": {
+ "multivalued": "true",
+ "userinfo.token.claim": "true",
+ "user.attribute": "foo",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "groups",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "d9f7cb53-ae29-47e0-aaf8-edd40acfa5b9",
+ "name": "upn",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-property-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "username",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "upn",
+ "jsonType.label": "String"
+ }
+ }
+ ]
+ },
+ {
+ "id": "c88a720f-8fe6-4750-81b6-b87551066905",
+ "name": "role_list",
+ "description": "SAML role list",
+ "protocol": "saml",
+ "attributes": {
+ "consent.screen.text": "${samlRoleListScopeConsentText}",
+ "display.on.consent.screen": "true"
+ },
+ "protocolMappers": [
+ {
+ "id": "0244f0c4-773e-40e3-a0e4-308f5b10ab78",
+ "name": "role list",
+ "protocol": "saml",
+ "protocolMapper": "saml-role-list-mapper",
+ "consentRequired": false,
+ "config": {
+ "single": "false",
+ "attribute.nameformat": "Basic",
+ "attribute.name": "Role"
+ }
+ }
+ ]
+ },
+ {
+ "id": "ba66e4d5-12f9-4c44-921d-42d901485803",
+ "name": "web-origins",
+ "description": "OpenID Connect scope for add allowed web origins to the access token",
+ "protocol": "openid-connect",
+ "attributes": {
+ "include.in.token.scope": "false",
+ "display.on.consent.screen": "false",
+ "consent.screen.text": ""
+ },
+ "protocolMappers": [
+ {
+ "id": "0f2f1ccf-7292-4e49-a079-d9166ec100bb",
+ "name": "allowed web origins",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-allowed-origins-mapper",
+ "consentRequired": false,
+ "config": {}
+ }
+ ]
+ },
+ {
+ "id": "e73a3670-4958-43bc-b5fa-982a895bc8d4",
+ "name": "phone",
+ "description": "OpenID Connect built-in scope: phone",
+ "protocol": "openid-connect",
+ "attributes": {
+ "include.in.token.scope": "true",
+ "display.on.consent.screen": "true",
+ "consent.screen.text": "${phoneScopeConsentText}"
+ },
+ "protocolMappers": [
+ {
+ "id": "bf04e15d-711a-4f66-b6f4-c35f21fcb0c8",
+ "name": "phone number",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-attribute-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "phoneNumber",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "phone_number",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "76493880-66bf-40d9-8f41-b14a8d400b1d",
+ "name": "phone number verified",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-attribute-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "phoneNumberVerified",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "phone_number_verified",
+ "jsonType.label": "boolean"
+ }
+ }
+ ]
+ },
+ {
+ "id": "5cb4b2c4-880e-4437-b905-19a5eb471765",
+ "name": "profile",
+ "description": "OpenID Connect built-in scope: profile",
+ "protocol": "openid-connect",
+ "attributes": {
+ "include.in.token.scope": "true",
+ "display.on.consent.screen": "true",
+ "consent.screen.text": "${profileScopeConsentText}"
+ },
+ "protocolMappers": [
+ {
+ "id": "f7ba27e0-141e-4389-93d2-cc6c5fb1f78a",
+ "name": "family name",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-property-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "lastName",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "family_name",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "f3c2b39e-a11b-4640-acb3-c6ce139235e5",
+ "name": "zoneinfo",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-attribute-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "zoneinfo",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "zoneinfo",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "bbf1c241-15c1-4d94-812a-ad4e260f77df",
+ "name": "picture",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-attribute-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "picture",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "picture",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "0dc161e8-f2e8-4017-b895-c24a78d38e92",
+ "name": "birthdate",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-attribute-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "birthdate",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "birthdate",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "86761664-57a4-47df-a891-d0d721243327",
+ "name": "nickname",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-attribute-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "nickname",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "nickname",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "20f086d5-a07c-4711-88aa-3396fafb2adf",
+ "name": "full name",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-full-name-mapper",
+ "consentRequired": false,
+ "config": {
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "userinfo.token.claim": "true"
+ }
+ },
+ {
+ "id": "d79a8b71-9312-4658-b14b-8f3145052116",
+ "name": "username",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-property-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "username",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "preferred_username",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "ad342e65-e36a-48cc-a90a-d48aacefab01",
+ "name": "profile",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-attribute-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "profile",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "profile",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "6abd60fb-39b7-4063-aaee-5ff380f0a97e",
+ "name": "gender",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-attribute-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "gender",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "gender",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "580133fc-8e44-4e7a-a526-dcbc7d82c911",
+ "name": "website",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-attribute-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "website",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "website",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "4bcde3c0-41ef-45e6-a23b-aea222640399",
+ "name": "locale",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-attribute-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "locale",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "locale",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "1ed7844e-9002-4c7b-be3d-61f9b3c725b9",
+ "name": "middle name",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-attribute-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "middleName",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "middle_name",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "4c9e9ec5-f40d-4b6e-9385-f86b0d228940",
+ "name": "updated at",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-attribute-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "updatedAt",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "updated_at",
+ "jsonType.label": "String"
+ }
+ },
+ {
+ "id": "bf10082b-d485-4cf4-bf31-f0181884e8cf",
+ "name": "given name",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-property-mapper",
+ "consentRequired": false,
+ "config": {
+ "userinfo.token.claim": "true",
+ "user.attribute": "firstName",
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "claim.name": "given_name",
+ "jsonType.label": "String"
+ }
+ }
+ ]
+ },
+ {
+ "id": "b25abfe5-1130-4d7d-98f4-227f8b0dc4f9",
+ "name": "roles",
+ "description": "OpenID Connect scope for add user roles to the access token",
+ "protocol": "openid-connect",
+ "attributes": {
+ "include.in.token.scope": "false",
+ "display.on.consent.screen": "true",
+ "consent.screen.text": "${rolesScopeConsentText}"
+ },
+ "protocolMappers": [
+ {
+ "id": "84b22a06-dced-4b2f-bbc8-f818b01c73d0",
+ "name": "realm roles",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-realm-role-mapper",
+ "consentRequired": false,
+ "config": {
+ "user.attribute": "foo",
+ "access.token.claim": "true",
+ "claim.name": "realm_access.roles",
+ "jsonType.label": "String",
+ "multivalued": "true"
+ }
+ },
+ {
+ "id": "5c6ed3cf-0840-4191-81ea-7092569f70fe",
+ "name": "audience resolve",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-audience-resolve-mapper",
+ "consentRequired": false,
+ "config": {}
+ },
+ {
+ "id": "dce34b2a-e58f-41b8-86ab-794edeccae40",
+ "name": "client roles",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-usermodel-client-role-mapper",
+ "consentRequired": false,
+ "config": {
+ "user.attribute": "foo",
+ "access.token.claim": "true",
+ "claim.name": "resource_access.${client_id}.roles",
+ "jsonType.label": "String",
+ "multivalued": "true"
+ }
+ }
+ ]
+ },
+ {
+ "id": "00476d55-cd2f-4f60-92dd-6f3ff634799e",
+ "name": "acr",
+ "description": "OpenID Connect scope for add acr (authentication context class reference) to the token",
+ "protocol": "openid-connect",
+ "attributes": {
+ "include.in.token.scope": "false",
+ "display.on.consent.screen": "false"
+ },
+ "protocolMappers": [
+ {
+ "id": "f0ae1247-2120-4513-b1d1-fab7cfecfbb8",
+ "name": "acr loa level",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-acr-mapper",
+ "consentRequired": false,
+ "config": {
+ "id.token.claim": "true",
+ "access.token.claim": "true",
+ "userinfo.token.claim": "true"
+ }
+ }
+ ]
+ },
+ {
+ "id": "3f68af4c-10e8-4351-a62d-f829b9832037",
+ "name": "address",
+ "description": "OpenID Connect built-in scope: address",
+ "protocol": "openid-connect",
+ "attributes": {
+ "include.in.token.scope": "true",
+ "display.on.consent.screen": "true",
+ "consent.screen.text": "${addressScopeConsentText}"
+ },
+ "protocolMappers": [
+ {
+ "id": "3037e6e9-e1d7-492c-a060-9b2c35c688cb",
+ "name": "address",
+ "protocol": "openid-connect",
+ "protocolMapper": "oidc-address-mapper",
+ "consentRequired": false,
+ "config": {
+ "user.attribute.formatted": "formatted",
+ "user.attribute.country": "country",
+ "user.attribute.postal_code": "postal_code",
+ "userinfo.token.claim": "true",
+ "user.attribute.street": "street",
+ "id.token.claim": "true",
+ "user.attribute.region": "region",
+ "access.token.claim": "true",
+ "user.attribute.locality": "locality"
+ }
+ }
+ ]
+ },
+ {
+ "id": "f900704b-5e92-451e-b093-02286cc22774",
+ "name": "offline_access",
+ "description": "OpenID Connect built-in scope: offline_access",
+ "protocol": "openid-connect",
+ "attributes": {
+ "consent.screen.text": "${offlineAccessScopeConsentText}",
+ "display.on.consent.screen": "true"
+ }
+ }
+ ],
+ "defaultDefaultClientScopes": [
+ "profile",
+ "email",
+ "roles",
+ "web-origins",
+ "role_list",
+ "acr"
+ ],
+ "defaultOptionalClientScopes": [
+ "address",
+ "microprofile-jwt",
+ "phone",
+ "offline_access"
+ ],
+ "browserSecurityHeaders": {
+ "contentSecurityPolicyReportOnly": "",
+ "xContentTypeOptions": "nosniff",
+ "xRobotsTag": "none",
+ "xFrameOptions": "SAMEORIGIN",
+ "contentSecurityPolicy": "frame-src 'self'; frame-ancestors 'self'; object-src 'none';",
+ "xXSSProtection": "1; mode=block",
+ "strictTransportSecurity": "max-age=31536000; includeSubDomains"
+ },
+ "smtpServer": {},
+ "accountTheme": "custom",
+ "eventsEnabled": true,
+ "eventsExpiration": 604800,
+ "eventsListeners": [
+ "api-key-registration-generation",
+ "metrics-listener",
+ "jboss-logging"
+ ],
+ "enabledEventTypes": [
+ "SEND_RESET_PASSWORD",
+ "REMOVE_TOTP",
+ "REVOKE_GRANT",
+ "UPDATE_TOTP",
+ "LOGIN_ERROR",
+ "CLIENT_LOGIN",
+ "RESET_PASSWORD_ERROR",
+ "IMPERSONATE_ERROR",
+ "CODE_TO_TOKEN_ERROR",
+ "CUSTOM_REQUIRED_ACTION",
+ "RESTART_AUTHENTICATION",
+ "IMPERSONATE",
+ "UPDATE_PROFILE_ERROR",
+ "LOGIN",
+ "UPDATE_PASSWORD_ERROR",
+ "CLIENT_INITIATED_ACCOUNT_LINKING",
+ "TOKEN_EXCHANGE",
+ "LOGOUT",
+ "REGISTER",
+ "CLIENT_REGISTER",
+ "IDENTITY_PROVIDER_LINK_ACCOUNT",
+ "UPDATE_PASSWORD",
+ "CLIENT_DELETE",
+ "FEDERATED_IDENTITY_LINK_ERROR",
+ "IDENTITY_PROVIDER_FIRST_LOGIN",
+ "CLIENT_DELETE_ERROR",
+ "VERIFY_EMAIL",
+ "CLIENT_LOGIN_ERROR",
+ "RESTART_AUTHENTICATION_ERROR",
+ "EXECUTE_ACTIONS",
+ "REMOVE_FEDERATED_IDENTITY_ERROR",
+ "TOKEN_EXCHANGE_ERROR",
+ "PERMISSION_TOKEN",
+ "SEND_IDENTITY_PROVIDER_LINK_ERROR",
+ "EXECUTE_ACTION_TOKEN_ERROR",
+ "SEND_VERIFY_EMAIL",
+ "EXECUTE_ACTIONS_ERROR",
+ "REMOVE_FEDERATED_IDENTITY",
+ "IDENTITY_PROVIDER_POST_LOGIN",
+ "IDENTITY_PROVIDER_LINK_ACCOUNT_ERROR",
+ "UPDATE_EMAIL",
+ "REGISTER_ERROR",
+ "REVOKE_GRANT_ERROR",
+ "EXECUTE_ACTION_TOKEN",
+ "LOGOUT_ERROR",
+ "UPDATE_EMAIL_ERROR",
+ "CLIENT_UPDATE_ERROR",
+ "UPDATE_PROFILE",
+ "CLIENT_REGISTER_ERROR",
+ "FEDERATED_IDENTITY_LINK",
+ "SEND_IDENTITY_PROVIDER_LINK",
+ "SEND_VERIFY_EMAIL_ERROR",
+ "RESET_PASSWORD",
+ "CLIENT_INITIATED_ACCOUNT_LINKING_ERROR",
+ "REMOVE_TOTP_ERROR",
+ "VERIFY_EMAIL_ERROR",
+ "SEND_RESET_PASSWORD_ERROR",
+ "CLIENT_UPDATE",
+ "CUSTOM_REQUIRED_ACTION_ERROR",
+ "IDENTITY_PROVIDER_POST_LOGIN_ERROR",
+ "UPDATE_TOTP_ERROR",
+ "CODE_TO_TOKEN",
+ "IDENTITY_PROVIDER_FIRST_LOGIN_ERROR"
+ ],
+ "adminEventsEnabled": false,
+ "adminEventsDetailsEnabled": false,
+ "identityProviders": [],
+ "identityProviderMappers": [],
+ "components": {
+ "org.keycloak.services.clientregistration.policy.ClientRegistrationPolicy": [
+ {
+ "id": "e108ed9d-422a-4c84-af0e-d7ea9ddc1890",
+ "name": "Trusted Hosts",
+ "providerId": "trusted-hosts",
+ "subType": "anonymous",
+ "subComponents": {},
+ "config": {
+ "host-sending-registration-request-must-match": [
+ "true"
+ ],
+ "client-uris-must-match": [
+ "true"
+ ]
+ }
+ },
+ {
+ "id": "b9a4a7f7-d684-45bd-b4bf-646be1f79364",
+ "name": "Full Scope Disabled",
+ "providerId": "scope",
+ "subType": "anonymous",
+ "subComponents": {},
+ "config": {}
+ },
+ {
+ "id": "8df4222e-0b62-44dc-be51-f27d828f0f66",
+ "name": "Allowed Protocol Mapper Types",
+ "providerId": "allowed-protocol-mappers",
+ "subType": "anonymous",
+ "subComponents": {},
+ "config": {
+ "allowed-protocol-mapper-types": [
+ "oidc-full-name-mapper",
+ "oidc-address-mapper",
+ "oidc-sha256-pairwise-sub-mapper",
+ "saml-user-attribute-mapper",
+ "saml-user-property-mapper",
+ "saml-role-list-mapper",
+ "oidc-usermodel-attribute-mapper",
+ "oidc-usermodel-property-mapper"
+ ]
+ }
+ },
+ {
+ "id": "59dd3e18-4dbe-4054-b012-423e8c4da909",
+ "name": "Max Clients Limit",
+ "providerId": "max-clients",
+ "subType": "anonymous",
+ "subComponents": {},
+ "config": {
+ "max-clients": [
+ "200"
+ ]
+ }
+ },
+ {
+ "id": "7ce212c8-2587-4f6c-8824-705eabb7f925",
+ "name": "Allowed Client Scopes",
+ "providerId": "allowed-client-templates",
+ "subType": "authenticated",
+ "subComponents": {},
+ "config": {
+ "allow-default-scopes": [
+ "true"
+ ]
+ }
+ },
+ {
+ "id": "4cbfdd25-6c33-4bad-8d88-9a1aec6c8e25",
+ "name": "Consent Required",
+ "providerId": "consent-required",
+ "subType": "anonymous",
+ "subComponents": {},
+ "config": {}
+ },
+ {
+ "id": "bd008843-3c81-4750-ae85-a5e4e181b877",
+ "name": "Allowed Protocol Mapper Types",
+ "providerId": "allowed-protocol-mappers",
+ "subType": "authenticated",
+ "subComponents": {},
+ "config": {
+ "allowed-protocol-mapper-types": [
+ "saml-role-list-mapper",
+ "saml-user-property-mapper",
+ "oidc-full-name-mapper",
+ "oidc-usermodel-attribute-mapper",
+ "oidc-address-mapper",
+ "oidc-usermodel-property-mapper",
+ "oidc-sha256-pairwise-sub-mapper",
+ "saml-user-attribute-mapper"
+ ]
+ }
+ },
+ {
+ "id": "2edf8e74-e1b6-4e6d-83a3-c1123d462d14",
+ "name": "Allowed Client Scopes",
+ "providerId": "allowed-client-templates",
+ "subType": "anonymous",
+ "subComponents": {},
+ "config": {
+ "allow-default-scopes": [
+ "true"
+ ]
+ }
+ }
+ ],
+ "org.keycloak.userprofile.UserProfileProvider": [
+ {
+ "id": "bfd8d11c-d90c-4620-802d-2b5bb04ed9d3",
+ "providerId": "declarative-user-profile",
+ "subComponents": {},
+ "config": {}
+ }
+ ],
+ "org.keycloak.keys.KeyProvider": [
+ {
+ "id": "ca2afd56-df5d-47ab-bea4-4416c859a338",
+ "name": "hmac-generated",
+ "providerId": "hmac-generated",
+ "subComponents": {},
+ "config": {
+ "priority": [
+ "100"
+ ],
+ "algorithm": [
+ "HS256"
+ ]
+ }
+ },
+ {
+ "id": "c72d323d-5737-4bed-bbc9-41be440e99fb",
+ "name": "rsa-generated",
+ "providerId": "rsa-generated",
+ "subComponents": {},
+ "config": {
+ "priority": [
+ "100"
+ ]
+ }
+ },
+ {
+ "id": "f80ab6e7-1b0a-4de4-acaa-3275d3f867a2",
+ "name": "aes-generated",
+ "providerId": "aes-generated",
+ "subComponents": {},
+ "config": {
+ "priority": [
+ "100"
+ ]
+ }
+ }
+ ]
+ },
+ "internationalizationEnabled": false,
+ "supportedLocales": [],
+ "authenticationFlows": [
+ {
+ "id": "4a7f9376-0b32-482d-acf0-49080e4af5bb",
+ "alias": "Handle Existing Account",
+ "description": "Handle what to do if there is existing account with same email/username like authenticated identity provider",
+ "providerId": "basic-flow",
+ "topLevel": false,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticator": "idp-confirm-link",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticatorFlow": true,
+ "requirement": "REQUIRED",
+ "priority": 20,
+ "autheticatorFlow": true,
+ "flowAlias": "Handle Existing Account - Alternatives - 0",
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "258bc22b-74bd-450d-b2c0-5110b16a690c",
+ "alias": "Handle Existing Account - Alternatives - 0",
+ "description": "Subflow of Handle Existing Account with alternative executions",
+ "providerId": "basic-flow",
+ "topLevel": false,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticator": "idp-email-verification",
+ "authenticatorFlow": false,
+ "requirement": "ALTERNATIVE",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticatorFlow": true,
+ "requirement": "ALTERNATIVE",
+ "priority": 20,
+ "autheticatorFlow": true,
+ "flowAlias": "Verify Existing Account by Re-authentication",
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "b5742967-0bfc-41d8-8738-8c24934d2c7b",
+ "alias": "Verify Existing Account by Re-authentication",
+ "description": "Reauthentication of existing account",
+ "providerId": "basic-flow",
+ "topLevel": false,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticator": "idp-username-password-form",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticatorFlow": true,
+ "requirement": "CONDITIONAL",
+ "priority": 20,
+ "autheticatorFlow": true,
+ "flowAlias": "Verify Existing Account by Re-authentication - auth-otp-form - Conditional",
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "cc49251b-8a75-4324-abbe-50bb00384e39",
+ "alias": "Verify Existing Account by Re-authentication - auth-otp-form - Conditional",
+ "description": "Flow to determine if the auth-otp-form authenticator should be used or not.",
+ "providerId": "basic-flow",
+ "topLevel": false,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticator": "conditional-user-configured",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "auth-otp-form",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 20,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "490a9641-0bea-425f-a04c-457d731489c0",
+ "alias": "browser",
+ "description": "browser based authentication",
+ "providerId": "basic-flow",
+ "topLevel": true,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticator": "auth-cookie",
+ "authenticatorFlow": false,
+ "requirement": "ALTERNATIVE",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "auth-spnego",
+ "authenticatorFlow": false,
+ "requirement": "DISABLED",
+ "priority": 20,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "identity-provider-redirector",
+ "authenticatorFlow": false,
+ "requirement": "ALTERNATIVE",
+ "priority": 25,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticatorFlow": true,
+ "requirement": "ALTERNATIVE",
+ "priority": 30,
+ "autheticatorFlow": true,
+ "flowAlias": "forms",
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "7efce4d5-b881-4e51-b390-5a40c342b185",
+ "alias": "browser plus basic",
+ "description": "browser based authentication",
+ "providerId": "basic-flow",
+ "topLevel": true,
+ "builtIn": false,
+ "authenticationExecutions": [
+ {
+ "authenticator": "auth-cookie",
+ "authenticatorFlow": false,
+ "requirement": "ALTERNATIVE",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "basic-auth",
+ "authenticatorFlow": false,
+ "requirement": "ALTERNATIVE",
+ "priority": 20,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "auth-spnego",
+ "authenticatorFlow": false,
+ "requirement": "DISABLED",
+ "priority": 25,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "identity-provider-redirector",
+ "authenticatorFlow": false,
+ "requirement": "ALTERNATIVE",
+ "priority": 30,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticatorFlow": true,
+ "requirement": "ALTERNATIVE",
+ "priority": 31,
+ "autheticatorFlow": true,
+ "flowAlias": "browser plus basic forms",
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "4f16e1b0-8531-47eb-8624-4bbf877d5596",
+ "alias": "browser plus basic forms",
+ "description": "Username, password, otp and other auth forms.",
+ "providerId": "basic-flow",
+ "topLevel": false,
+ "builtIn": false,
+ "authenticationExecutions": [
+ {
+ "authenticator": "auth-username-password-form",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticatorFlow": true,
+ "requirement": "CONDITIONAL",
+ "priority": 20,
+ "autheticatorFlow": true,
+ "flowAlias": "browser plus basic forms - auth-otp-form - Conditional",
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "22ddde40-03fe-425f-9dda-d7e8d108d5a3",
+ "alias": "browser plus basic forms - auth-otp-form - Conditional",
+ "description": "Flow to determine if the auth-otp-form authenticator should be used or not.",
+ "providerId": "basic-flow",
+ "topLevel": false,
+ "builtIn": false,
+ "authenticationExecutions": [
+ {
+ "authenticator": "conditional-user-configured",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "auth-otp-form",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 20,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "8253fd42-58bd-4536-8671-5c68522fa342",
+ "alias": "clients",
+ "description": "Base authentication for clients",
+ "providerId": "client-flow",
+ "topLevel": true,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticator": "client-secret",
+ "authenticatorFlow": false,
+ "requirement": "ALTERNATIVE",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "client-jwt",
+ "authenticatorFlow": false,
+ "requirement": "ALTERNATIVE",
+ "priority": 20,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "client-secret-jwt",
+ "authenticatorFlow": false,
+ "requirement": "ALTERNATIVE",
+ "priority": 30,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "client-x509",
+ "authenticatorFlow": false,
+ "requirement": "ALTERNATIVE",
+ "priority": 40,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "04bf48cf-9568-48f4-8f17-a03af2c61419",
+ "alias": "direct grant",
+ "description": "OpenID Connect Resource Owner Grant",
+ "providerId": "basic-flow",
+ "topLevel": true,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticator": "direct-grant-validate-username",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "direct-grant-validate-password",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 20,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticatorFlow": true,
+ "requirement": "CONDITIONAL",
+ "priority": 30,
+ "autheticatorFlow": true,
+ "flowAlias": "direct grant - direct-grant-validate-otp - Conditional",
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "61ad005d-bf91-4794-9842-3ae727a4751c",
+ "alias": "direct grant - direct-grant-validate-otp - Conditional",
+ "description": "Flow to determine if the direct-grant-validate-otp authenticator should be used or not.",
+ "providerId": "basic-flow",
+ "topLevel": false,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticator": "conditional-user-configured",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "direct-grant-validate-otp",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 20,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "c65324a7-d836-4509-bf0c-12bd7ffcbf2b",
+ "alias": "docker auth",
+ "description": "Used by Docker clients to authenticate against the IDP",
+ "providerId": "basic-flow",
+ "topLevel": true,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticator": "docker-http-basic-authenticator",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "91bf5412-35f7-40ff-9374-e135aa788687",
+ "alias": "first broker login",
+ "description": "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account",
+ "providerId": "basic-flow",
+ "topLevel": true,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticatorConfig": "review profile config",
+ "authenticator": "idp-review-profile",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticatorFlow": true,
+ "requirement": "REQUIRED",
+ "priority": 20,
+ "autheticatorFlow": true,
+ "flowAlias": "first broker login - Alternatives - 0",
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "4112f733-14e0-404b-978e-335ecda4a88e",
+ "alias": "first broker login - Alternatives - 0",
+ "description": "Subflow of first broker login with alternative executions",
+ "providerId": "basic-flow",
+ "topLevel": false,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticatorConfig": "create unique user config",
+ "authenticator": "idp-create-user-if-unique",
+ "authenticatorFlow": false,
+ "requirement": "ALTERNATIVE",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticatorFlow": true,
+ "requirement": "ALTERNATIVE",
+ "priority": 20,
+ "autheticatorFlow": true,
+ "flowAlias": "Handle Existing Account",
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "fc661cc2-942d-4596-84e7-0ab62c6dada2",
+ "alias": "forms",
+ "description": "Username, password, otp and other auth forms.",
+ "providerId": "basic-flow",
+ "topLevel": false,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticator": "auth-username-password-form",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticatorFlow": true,
+ "requirement": "CONDITIONAL",
+ "priority": 20,
+ "autheticatorFlow": true,
+ "flowAlias": "forms - auth-otp-form - Conditional",
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "06555841-cc79-4f16-8497-6c107896e07a",
+ "alias": "forms - auth-otp-form - Conditional",
+ "description": "Flow to determine if the auth-otp-form authenticator should be used or not.",
+ "providerId": "basic-flow",
+ "topLevel": false,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticator": "conditional-user-configured",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "auth-otp-form",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 20,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "850ed202-6ac8-4dbc-80dd-ef181327bc23",
+ "alias": "http challenge",
+ "description": "An authentication flow based on challenge-response HTTP Authentication Schemes",
+ "providerId": "basic-flow",
+ "topLevel": true,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticator": "no-cookie-redirect",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "basic-auth",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 20,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "basic-auth-otp",
+ "authenticatorFlow": false,
+ "requirement": "DISABLED",
+ "priority": 30,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "auth-spnego",
+ "authenticatorFlow": false,
+ "requirement": "DISABLED",
+ "priority": 40,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "b5a45a81-fdc4-4473-9194-595b5b09f817",
+ "alias": "registration",
+ "description": "registration flow",
+ "providerId": "basic-flow",
+ "topLevel": true,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticator": "registration-page-form",
+ "authenticatorFlow": true,
+ "requirement": "REQUIRED",
+ "priority": 10,
+ "autheticatorFlow": true,
+ "flowAlias": "registration form",
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "0f2d666e-7413-495e-aeb5-abed50c497f4",
+ "alias": "registration form",
+ "description": "registration form",
+ "providerId": "form-flow",
+ "topLevel": false,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticator": "registration-user-creation",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 20,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "registration-profile-action",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 40,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "registration-password-action",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 50,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "registration-recaptcha-action",
+ "authenticatorFlow": false,
+ "requirement": "DISABLED",
+ "priority": 60,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "4cbd3b65-cec8-4a0a-8566-50336ad16dc8",
+ "alias": "reset credentials",
+ "description": "Reset credentials for a user if they forgot their password or something",
+ "providerId": "basic-flow",
+ "topLevel": true,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticator": "reset-credentials-choose-user",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "reset-credential-email",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 20,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "reset-password",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 30,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticatorFlow": true,
+ "requirement": "CONDITIONAL",
+ "priority": 40,
+ "autheticatorFlow": true,
+ "flowAlias": "reset credentials - reset-otp - Conditional",
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "c027601d-55dd-4c36-9821-816815689e48",
+ "alias": "reset credentials - reset-otp - Conditional",
+ "description": "Flow to determine if the reset-otp authenticator should be used or not.",
+ "providerId": "basic-flow",
+ "topLevel": false,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticator": "conditional-user-configured",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ },
+ {
+ "authenticator": "reset-otp",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 20,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ }
+ ]
+ },
+ {
+ "id": "76a19a9d-bbe9-4274-b743-ee5a001e7cff",
+ "alias": "saml ecp",
+ "description": "SAML ECP Profile Authentication Flow",
+ "providerId": "basic-flow",
+ "topLevel": true,
+ "builtIn": true,
+ "authenticationExecutions": [
+ {
+ "authenticator": "http-basic-authenticator",
+ "authenticatorFlow": false,
+ "requirement": "REQUIRED",
+ "priority": 10,
+ "autheticatorFlow": false,
+ "userSetupAllowed": false
+ }
+ ]
+ }
+ ],
+ "authenticatorConfig": [
+ {
+ "id": "6428d38a-d80b-4cc0-89a2-698c7eb40fbb",
+ "alias": "create unique user config",
+ "config": {
+ "require.password.update.after.registration": "false"
+ }
+ },
+ {
+ "id": "d0dbc8d3-d2e5-4de3-bdb6-83c6a0b2f904",
+ "alias": "review profile config",
+ "config": {
+ "update.profile.on.first.login": "missing"
+ }
+ }
+ ],
+ "requiredActions": [
+ {
+ "alias": "CONFIGURE_TOTP",
+ "name": "Configure OTP",
+ "providerId": "CONFIGURE_TOTP",
+ "enabled": true,
+ "defaultAction": false,
+ "priority": 10,
+ "config": {}
+ },
+ {
+ "alias": "terms_and_conditions",
+ "name": "Terms and Conditions",
+ "providerId": "terms_and_conditions",
+ "enabled": false,
+ "defaultAction": false,
+ "priority": 20,
+ "config": {}
+ },
+ {
+ "alias": "UPDATE_PASSWORD",
+ "name": "Update Password",
+ "providerId": "UPDATE_PASSWORD",
+ "enabled": true,
+ "defaultAction": false,
+ "priority": 30,
+ "config": {}
+ },
+ {
+ "alias": "UPDATE_PROFILE",
+ "name": "Update Profile",
+ "providerId": "UPDATE_PROFILE",
+ "enabled": true,
+ "defaultAction": false,
+ "priority": 40,
+ "config": {}
+ },
+ {
+ "alias": "VERIFY_EMAIL",
+ "name": "Verify Email",
+ "providerId": "VERIFY_EMAIL",
+ "enabled": true,
+ "defaultAction": false,
+ "priority": 50,
+ "config": {}
+ },
+ {
+ "alias": "delete_account",
+ "name": "Delete Account",
+ "providerId": "delete_account",
+ "enabled": false,
+ "defaultAction": false,
+ "priority": 60,
+ "config": {}
+ },
+ {
+ "alias": "update_user_locale",
+ "name": "Update User Locale",
+ "providerId": "update_user_locale",
+ "enabled": true,
+ "defaultAction": false,
+ "priority": 1000,
+ "config": {}
+ }
+ ],
+ "browserFlow": "browser plus basic",
+ "registrationFlow": "registration",
+ "directGrantFlow": "direct grant",
+ "resetCredentialsFlow": "reset credentials",
+ "clientAuthenticationFlow": "clients",
+ "dockerAuthenticationFlow": "docker auth",
+ "attributes": {
+ "cibaBackchannelTokenDeliveryMode": "poll",
+ "cibaExpiresIn": "120",
+ "cibaAuthRequestedUserHint": "login_hint",
+ "oauth2DeviceCodeLifespan": "600",
+ "clientOfflineSessionMaxLifespan": "0",
+ "oauth2DevicePollingInterval": "5",
+ "clientSessionIdleTimeout": "0",
+ "parRequestUriLifespan": "60",
+ "clientSessionMaxLifespan": "0",
+ "clientOfflineSessionIdleTimeout": "0",
+ "cibaInterval": "5",
+ "realmReusableOtpCode": "false"
+ },
+ "keycloakVersion": "20.0.5",
+ "userManagedAccessAllowed": false,
+ "clientProfiles": {
+ "profiles": []
+ },
+ "clientPolicies": {
+ "policies": []
+ }
+}
\ No newline at end of file
diff --git a/app/data/config/mainnet-eth-keycloak/keycloak.env b/app/data/config/mainnet-eth-keycloak/keycloak.env
new file mode 100644
index 00000000..f37fdd30
--- /dev/null
+++ b/app/data/config/mainnet-eth-keycloak/keycloak.env
@@ -0,0 +1,29 @@
+POSTGRES_DB=keycloak
+POSTGRES_USER=keycloak
+POSTGRES_PASSWORD=keycloak
+KC_DB=postgres
+KC_DB_URL_HOST=keycloak-db
+KC_DB_URL_DATABASE=${POSTGRES_DB}
+KC_DB_USERNAME=${POSTGRES_USER}
+KC_DB_PASSWORD=${POSTGRES_PASSWORD}
+KC_DB_SCHEMA=public
+KC_HOSTNAME=localhost
+KC_HTTP_ENABLED="true"
+KC_HTTP_RELATIVE_PATH="/auth"
+KC_HOSTNAME_STRICT_HTTPS="false"
+KEYCLOAK_ADMIN=admin
+KEYCLOAK_ADMIN_PASSWORD=admin
+X_API_CHECK_REALM=cerc
+X_API_CHECK_CLIENT_ID="%user_id%"
+
+# keycloak-reg-api
+CERC_KCUSERREG_LISTEN_PORT=9292
+CERC_KCUSERREG_LISTEN_ADDR='0.0.0.0'
+CERC_KCUSERREG_API_URL='http://keycloak:8080/auth'
+CERC_KCUSERREG_REG_USER="${KEYCLOAK_ADMIN}"
+CERC_KCUSERREG_REG_PW="${KEYCLOAK_ADMIN_PASSWORD}"
+CERC_KCUSERREG_REG_CLIENT_ID='admin-cli'
+CERC_KCUSERREG_TARGET_REALM=cerc
+CERC_KCUSERREG_TARGET_GROUPS=eth
+CERC_KCUSERREG_CREATE_ENABLED=true
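+
+# NOTE: the credentials above are development defaults (admin/admin, keycloak/keycloak);
+# override POSTGRES_PASSWORD and KEYCLOAK_ADMIN_PASSWORD for any shared deployment.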
diff --git a/app/data/config/mainnet-eth-keycloak/nginx.example b/app/data/config/mainnet-eth-keycloak/nginx.example
new file mode 100644
index 00000000..67095551
--- /dev/null
+++ b/app/data/config/mainnet-eth-keycloak/nginx.example
@@ -0,0 +1,107 @@
+server {
+ listen 80;
+ server_name my.example.com;
+
+ # See: https://github.com/acmesh-official/acme.sh/wiki/Stateless-Mode
+ # and https://datatracker.ietf.org/doc/html/rfc8555
+ location ~ ^/\.well-known/acme-challenge/([-_a-zA-Z0-9]+)$ {
+ default_type text/plain;
+ return 200 "$1.MY_ACCOUNT_THUMBPRINT_GOES_HERE";
+ }
+
+ location / {
+ return 301 https://$host$request_uri;
+ }
+}
+
+upstream geth-pool {
+ keepalive 100;
+ hash $user_id consistent;
+ server server-a:8545;
+ server server-b:8545;
+ server server-c:8545;
+}
+
+# self-registration is handled on a single server for simplicity
+upstream reg-ui-pool {
+ keepalive 100;
+ server server-a:8085;
+}
+
+upstream reg-api-pool {
+ keepalive 100;
+ server server-a:8086;
+}
+
+# auth uses server-a if available
+upstream auth-pool {
+ keepalive 100;
+ server server-a:8080;
+ server server-b:8080 backup;
+ server server-c:8080 backup;
+}
+
+log_format upstreamlog '[$time_local] $remote_addr $user_id - $server_name $host to: $upstream_addr: $request $status upstream_response_time $upstream_response_time msec $msec request_time $request_time';
+proxy_cache_path /var/cache/nginx/auth_cache levels=1 keys_zone=auth_cache:1m max_size=5m inactive=60m;
+server {
+ listen 443 ssl http2;
+ server_name my.example.com;
+ access_log /var/log/nginx/my.example.com-access.log upstreamlog;
+ error_log /var/log/nginx/my.example.com-error.log;
+
+ ssl_certificate /etc/nginx/ssl/my.example.com/cert.pem;
+ ssl_certificate_key /etc/nginx/ssl/my.example.com/key.pem;
+
+ error_page 500 502 503 504 /50x.html;
+ location = /50x.html {
+ root /usr/share/nginx/html;
+ }
+
+ #rewrite ^/?$ /newuser/;
+ rewrite ^/?$ https://www.example.com/;
+
+
+ # geth-pool ETH API
+ location ~ ^/v1/eth/?([^/]*)$ {
+ set $apiKey $1;
+ if ($apiKey = '') {
+ set $apiKey $http_X_API_KEY;
+ }
+ auth_request /auth;
+ auth_request_set $user_id $sent_http_x_user_id;
+ rewrite /.*$ / break;
+
+ client_max_body_size 3m;
+ client_body_buffer_size 3m;
+ proxy_buffer_size 32k;
+ proxy_buffers 16 32k;
+ proxy_busy_buffers_size 96k;
+
+ proxy_pass http://geth-pool;
+ proxy_set_header X-Original-Remote-Addr $remote_addr;
+ proxy_set_header X-User-Id $user_id;
+ }
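+
+    # Example request (hypothetical API key), matching the path capture above,
+    # with the X-API-KEY header as the fallback when the path segment is empty:
+    #   curl https://my.example.com/v1/eth/MY_API_KEY \
+    #     -d '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}'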
+
+ # keycloak
+ location = /auth {
+ internal;
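+        # Cache the auth verdict per API key so Keycloak is consulted at most
+        # once per key until the cached entry (200: 300s, 401: 30s) expires.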
+ proxy_cache auth_cache;
+ proxy_cache_key "$apiKey";
+ proxy_cache_valid 200 300s;
+ proxy_cache_valid 401 30s;
+ proxy_pass http://auth-pool/auth/realms/cerc/check?memberOf=eth&apiKey=$apiKey;
+ proxy_pass_request_body off;
+ proxy_set_header Content-Length "";
+ proxy_set_header X-Original-URI $request_uri;
+ proxy_set_header X-Original-Remote-Addr $remote_addr;
+ proxy_set_header X-Original-Host $host;
+ }
+
+ location /newuser/ {
+ proxy_pass http://reg-ui-pool/;
+ }
+
+ location /user-api/ {
+ proxy_pass http://reg-api-pool/;
+ }
+}
diff --git a/app/data/config/mainnet-eth-keycloak/scripts/keycloak-mirror/keycloak-mirror.py b/app/data/config/mainnet-eth-keycloak/scripts/keycloak-mirror/keycloak-mirror.py
new file mode 100755
index 00000000..86a90180
--- /dev/null
+++ b/app/data/config/mainnet-eth-keycloak/scripts/keycloak-mirror/keycloak-mirror.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python3
+
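+"""Mirror a Keycloak Postgres database from a source host to a destination.
+
+The source database is exported with pg_dump; with --live-import the dump is
+replayed on the destination, optionally through an SSH tunnel (--ssh-host).
+"""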
+import argparse
+import os
+import sys
+
+import psycopg
+import random
+
+from subprocess import Popen
+from fabric import Connection
+
+
+def dump_src_db_to_file(db_host, db_port, db_user, db_password, db_name, file_name):
+ command = f"pg_dump -h {db_host} -p {db_port} -U {db_user} -d {db_name} -c --inserts -f {file_name}"
+ my_env = os.environ.copy()
+ my_env["PGPASSWORD"] = db_password
+ print(f"Exporting from {db_host}:{db_port}/{db_name} to {file_name}... ", end="")
+ ret = Popen(command, shell=True, env=my_env).wait()
+ print("DONE")
+ return ret
+
+
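+# NOTE: currently unused by the main flow below, which builds its SSH tunnel
+# inline; kept as a convenience for interactive use.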
+def establish_ssh_tunnel(ssh_host, ssh_port, ssh_user, db_host, db_port):
+ local_port = random.randint(11000, 12000)
+ conn = Connection(host=ssh_host, port=ssh_port, user=ssh_user)
+ fw = conn.forward_local(
+ local_port=local_port, remote_port=db_port, remote_host=db_host
+ )
+ return conn, fw, local_port
+
+
+def load_db_from_file(db_host, db_port, db_user, db_password, db_name, file_name):
+ connstr = "host=%s port=%s user=%s password=%s sslmode=disable dbname=%s" % (
+ db_host,
+ db_port,
+ db_user,
+ db_password,
+ db_name,
+ )
+ with psycopg.connect(connstr) as conn:
+ with conn.cursor() as cur:
+ print(
+ f"Importing from {file_name} to {db_host}:{db_port}/{db_name}... ",
+ end="",
+ )
+            with open(file_name, "rt") as f:
+                cur.execute(f.read())
+ print("DONE")
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser()
+
+ parser.add_argument("--src-dbhost", help="DB hostname", default="localhost")
+ parser.add_argument("--src-dbport", help="DB port", default=5432, type=int)
+ parser.add_argument("--src-dbuser", help="DB username", default="keycloak")
+ parser.add_argument("--src-dbpw", help="DB password", required=True)
+ parser.add_argument("--src-dbname", help="dbname", default="keycloak")
+
+ parser.add_argument(
+ "--dst-file", help="Destination filename", default="keycloak-mirror.sql"
+ )
+
+ parser.add_argument("--live-import", help="run the import", action="store_true")
+
+ parser.add_argument("--dst-dbhost", help="DB hostname", default="localhost")
+ parser.add_argument("--dst-dbport", help="DB port", default=5432, type=int)
+ parser.add_argument("--dst-dbuser", help="DB username", default="keycloak")
+ parser.add_argument("--dst-dbpw", help="DB password")
+ parser.add_argument("--dst-dbname", help="dbname", default="keycloak")
+
+ parser.add_argument("--ssh-host", help="SSH hostname")
+ parser.add_argument("--ssh-port", help="SSH port", default=22, type=int)
+ parser.add_argument("--ssh-user", help="SSH user")
+
+ args = parser.parse_args()
+
+ if args.live_import and not args.dst_dbpw:
+ print("--dst-dbpw is required if importing", file=sys.stderr)
+ sys.exit(2)
+
+    # argparse supplies a default, so dst_file is always set
+    dst_file = args.dst_file
+
+ dump_src_db_to_file(
+ args.src_dbhost,
+ args.src_dbport,
+ args.src_dbuser,
+ args.src_dbpw,
+ args.src_dbname,
+ dst_file,
+ )
+
+ if args.live_import:
+ try:
+ if args.ssh_host:
+ dst_dbport = random.randint(11000, 12000)
+                print(
+                    f"Establishing SSH tunnel from 127.0.0.1:{dst_dbport} to "
+                    f"{args.ssh_host}->{args.dst_dbhost}:{args.dst_dbport}... ",
+                    end="",
+                )
+ with Connection(
+ host=args.ssh_host, port=args.ssh_port, user=args.ssh_user
+ ).forward_local(
+ local_port=dst_dbport,
+ remote_port=args.dst_dbport,
+ remote_host=args.dst_dbhost,
+ ):
+ print("DONE")
+
+                    load_db_from_file(
+                        "127.0.0.1",  # connect via the local end of the SSH tunnel
+                        dst_dbport,
+                        args.dst_dbuser,
+                        args.dst_dbpw,
+                        args.dst_dbname,
+                        dst_file,
+                    )
+ else:
+ load_db_from_file(
+ args.dst_dbhost,
+ args.dst_dbport,
+ args.dst_dbuser,
+ args.dst_dbpw,
+ args.dst_dbname,
+ dst_file,
+ )
+
+        finally:
+            # the dump is only a transfer artifact during a live import
+            print(f"Removing {dst_file}... ", end="")
+            os.remove(dst_file)
+            print("DONE")
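+
+# Example invocation (hypothetical hosts):
+#   ./keycloak-mirror.py --src-dbpw src-secret --live-import \
+#       --dst-dbhost db.internal --dst-dbpw dst-secret \
+#       --ssh-host bastion.example.com --ssh-user deploy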
diff --git a/app/data/config/mainnet-eth-keycloak/scripts/keycloak-mirror/requirements.txt b/app/data/config/mainnet-eth-keycloak/scripts/keycloak-mirror/requirements.txt
new file mode 100644
index 00000000..45b2bc57
--- /dev/null
+++ b/app/data/config/mainnet-eth-keycloak/scripts/keycloak-mirror/requirements.txt
@@ -0,0 +1,3 @@
+fabric
+psycopg~=3.1.8
+# keep the binary implementation pinned to the same release as psycopg
+psycopg_binary~=3.1.8
diff --git a/app/data/config/mainnet-eth-keycloak/ui/config.yml b/app/data/config/mainnet-eth-keycloak/ui/config.yml
new file mode 100644
index 00000000..6f38a61b
--- /dev/null
+++ b/app/data/config/mainnet-eth-keycloak/ui/config.yml
@@ -0,0 +1,4 @@
+web:
+ path: ''
+api:
+ url: 'http://keycloak-reg-api:9292'
diff --git a/app/data/config/mainnet-eth-metrics/grafana/etc/dashboards/eth_dashboard.json b/app/data/config/mainnet-eth-metrics/grafana/etc/dashboards/eth_dashboard.json
new file mode 100644
index 00000000..623c0a56
--- /dev/null
+++ b/app/data/config/mainnet-eth-metrics/grafana/etc/dashboards/eth_dashboard.json
@@ -0,0 +1,1102 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": {
+ "type": "grafana",
+ "uid": "-- Grafana --"
+ },
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "target": {
+ "limit": 100,
+ "matchAny": false,
+ "tags": [],
+ "type": "dashboard"
+ },
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "fiscalYearStartMonth": 0,
+ "graphTooltip": 0,
+ "id": 45,
+ "links": [],
+ "liveNow": false,
+ "panels": [
+ {
+ "collapsed": false,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "id": 16,
+ "panels": [],
+ "title": "Services",
+ "type": "row"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "percentage",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 4,
+ "x": 0,
+ "y": 1
+ },
+ "id": 12,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "horizontal",
+ "reduceOptions": {
+ "calcs": [
+ "lastNotNull"
+ ],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "10.1.0",
+ "targets": [
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "chain_head_block",
+ "hide": false,
+ "legendFormat": "geth ({{instance}})",
+ "range": true,
+ "refId": "C"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "beacon_head_slot",
+ "hide": false,
+ "legendFormat": "lighthouse ({{instance}})",
+ "range": true,
+ "refId": "A"
+ }
+ ],
+ "title": "Head Positions",
+ "transformations": [
+ ],
+ "type": "stat"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "drawStyle": "line",
+ "fillOpacity": 0,
+ "gradientMode": "none",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "insertNulls": false,
+ "lineInterpolation": "linear",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "auto",
+ "spanNulls": false,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "decimals": 2,
+ "mappings": [],
+ "min": 0,
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "none"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 8,
+ "x": 4,
+ "y": 1
+ },
+ "id": 2,
+ "options": {
+ "legend": {
+ "calcs": [],
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": true
+ },
+ "tooltip": {
+ "mode": "single",
+ "sort": "none"
+ }
+ },
+ "pluginVersion": "9.0.1",
+ "targets": [
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "exemplar": false,
+ "expr": "rate(keycloak_logins[$__rate_interval]) * 60",
+ "format": "time_series",
+ "instant": false,
+ "interval": "5m",
+ "legendFormat": "{{client_id}} ({{instance}})",
+ "range": true,
+ "refId": "A"
+ }
+ ],
+ "title": "Keycloak Logins/min",
+ "transformations": [
+ {
+ "id": "seriesToColumns",
+ "options": {
+ "byField": "Time"
+ }
+ },
+ {
+ "id": "organize",
+ "options": {
+ "excludeByName": {
+ "00c13eee-e48c-4c7f-872b-f0e4c1e67d7f": true,
+ "0cf1b92d-a15c-40a8-b2ae-49a758e658ee": true,
+ "13761bf2-2c0e-42fd-acc8-09a677ab179e": true,
+ "79a56c4e-0bfb-4ef0-87d4-244c4ace9b4b": true,
+ "9c41e289-92f0-456f-bd65-8f73024a9e45": true,
+ "VVQkSrRksVEKAqUmiNh4aMAVD6BcviyYV4b5LpnCrtUkRwTMoE (jackal.lan.vdb.to:8080)": true,
+ "c9718dd1-06d6-4f98-a821-9adbeb4b663b": true,
+ "nYByi9xhWdDHPpStoPLP3hjeJUzyQou8dsjK8aSipOglB3yx75 (jackal.lan.vdb.to:8080)": true,
+ "oA7XX21gTNMFRJJJdbDzjBVERbjrWamzIGhwHUAeMLrd1sXCWx (jackal.lan.vdb.to:8080)": true,
+ "oJf5jLHEqRbZg7BvSboYX1DM2T25K7XS4jgwF8NNzl3AHNWHm2 (jackal.lan.vdb.to:8080)": true
+ },
+ "indexByName": {},
+ "renameByName": {
+ "00c13eee-e48c-4c7f-872b-f0e4c1e67d7f": "",
+ "0cf1b92d-a15c-40a8-b2ae-49a758e658ee": "rickm",
+ "13761bf2-2c0e-42fd-acc8-09a677ab179e": "telackey",
+ "2753f8be-0036-49ba-a53a-4963573fc15c": "cerc-io",
+ "79a56c4e-0bfb-4ef0-87d4-244c4ace9b4b": "",
+ "9c41e289-92f0-456f-bd65-8f73024a9e45": "",
+ "c9718dd1-06d6-4f98-a821-9adbeb4b663b": "dboreham",
+ "f5083aa7-5b5d-4164-b189-d7a559c4aad0": "infura0"
+ }
+ }
+ }
+ ],
+ "type": "timeseries"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "drawStyle": "line",
+ "fillOpacity": 28,
+ "gradientMode": "none",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "insertNulls": false,
+ "lineInterpolation": "smooth",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "auto",
+ "spanNulls": false,
+ "stacking": {
+ "group": "A",
+ "mode": "normal"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "min": 0,
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 12,
+ "x": 12,
+ "y": 1
+ },
+ "id": 20,
+ "options": {
+ "legend": {
+ "calcs": [],
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": true
+ },
+ "tooltip": {
+ "mode": "single",
+ "sort": "none"
+ }
+ },
+ "targets": [
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(rpc_duration_all_count[$__rate_interval])",
+ "hide": false,
+ "instant": false,
+ "legendFormat": "Requests ({{instance}})",
+ "range": true,
+ "refId": "A"
+ }
+ ],
+ "title": "geth API Requests/sec",
+ "transformations": [
+ ],
+ "type": "timeseries"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "drawStyle": "line",
+ "fillOpacity": 0,
+ "gradientMode": "none",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "insertNulls": false,
+ "lineInterpolation": "linear",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "auto",
+ "spanNulls": false,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "min": 0,
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 12,
+ "x": 0,
+ "y": 9
+ },
+ "id": 67,
+ "options": {
+ "legend": {
+ "calcs": [],
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": true
+ },
+ "tooltip": {
+ "mode": "single",
+ "sort": "none"
+ }
+ },
+ "pluginVersion": "9.0.1",
+ "targets": [
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "p2p_peers{job=\"geth\"}",
+ "legendFormat": "geth ({{instance}})",
+ "range": true,
+ "refId": "A"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "network_inbound_peers{job=\"lighthouse-beacon\"}",
+ "hide": false,
+ "legendFormat": "lighthouse ({{instance}})",
+ "range": true,
+ "refId": "B"
+ }
+ ],
+ "title": "P2P Peers",
+ "transformations": [
+ ],
+ "type": "timeseries"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "drawStyle": "line",
+ "fillOpacity": 0,
+ "gradientMode": "none",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "insertNulls": false,
+ "lineInterpolation": "linear",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "auto",
+ "spanNulls": false,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ },
+ "unit": "binBps"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 12,
+ "x": 12,
+ "y": 9
+ },
+ "id": 32,
+ "options": {
+ "legend": {
+ "calcs": [],
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": true
+ },
+ "tooltip": {
+ "mode": "single",
+ "sort": "none"
+ }
+ },
+ "targets": [
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(system_disk_writebytes[$__rate_interval])",
+ "interval": "",
+ "legendFormat": "Process Write ({{instance}})",
+ "range": true,
+ "refId": "A"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(system_disk_readbytes[$__rate_interval])",
+ "hide": false,
+ "legendFormat": "Process Read ({{instance}})",
+ "range": true,
+ "refId": "B"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(eth_db_chaindata_disk_read[$__rate_interval])",
+ "hide": false,
+ "legendFormat": "ethdb Read ({{instance}})",
+ "range": true,
+ "refId": "C"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(eth_db_chaindata_disk_write[$__rate_interval])",
+ "hide": false,
+ "legendFormat": "ethdb Write ({{instance}})",
+ "range": true,
+ "refId": "D"
+ }
+ ],
+ "title": "geth Disk Bytes/sec",
+ "transformations": [
+ ],
+ "type": "timeseries"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "drawStyle": "line",
+ "fillOpacity": 0,
+ "gradientMode": "none",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "insertNulls": false,
+ "lineInterpolation": "linear",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "auto",
+ "spanNulls": false,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 12,
+ "x": 0,
+ "y": 17
+ },
+ "id": 34,
+ "options": {
+ "legend": {
+ "calcs": [],
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": true
+ },
+ "tooltip": {
+ "mode": "single",
+ "sort": "none"
+ }
+ },
+ "targets": [
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(system_disk_readcount[$__rate_interval])",
+ "legendFormat": "Process Read ({{instance}})",
+ "range": true,
+ "refId": "A"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(system_disk_writecount[$__rate_interval])",
+ "hide": false,
+ "legendFormat": "Process Write ({{instance}})",
+ "range": true,
+ "refId": "B"
+ }
+ ],
+ "title": "geth Disk Operations/sec",
+ "transformations": [
+ ],
+ "type": "timeseries"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "drawStyle": "line",
+ "fillOpacity": 0,
+ "gradientMode": "none",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "insertNulls": false,
+ "lineInterpolation": "linear",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "auto",
+ "spanNulls": false,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 12,
+ "x": 12,
+ "y": 17
+ },
+ "id": 61,
+ "options": {
+ "legend": {
+ "calcs": [],
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": true
+ },
+ "tooltip": {
+ "mode": "single",
+ "sort": "none"
+ }
+ },
+ "targets": [
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(eth_db_chaindata_db_get_time_count[$__rate_interval])",
+ "legendFormat": "get ({{instance}})",
+ "range": true,
+ "refId": "A"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(eth_db_chaindata_db_put_time_count[$__rate_interval])",
+ "hide": false,
+ "legendFormat": "put ({{instance}})",
+ "range": true,
+ "refId": "C"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(eth_db_chaindata_db_has_time_count[$__rate_interval])",
+ "hide": false,
+ "legendFormat": "has ({{instance}})",
+ "range": true,
+ "refId": "B"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(eth_db_chaindata_db_delete_time_count[$__rate_interval])",
+ "hide": false,
+ "legendFormat": "delete ({{instance}})",
+ "range": true,
+ "refId": "D"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(eth_db_chaindata_db_batch_write_time_count[$__rate_interval])",
+ "hide": false,
+ "legendFormat": "batch_write ({{instance}})",
+ "range": true,
+ "refId": "E"
+ }
+ ],
+ "title": "geth LevelDB Operations/sec",
+ "transformations": [
+ ],
+ "type": "timeseries"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "description": "",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "drawStyle": "line",
+ "fillOpacity": 0,
+ "gradientMode": "none",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "insertNulls": false,
+ "lineInterpolation": "linear",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "auto",
+ "spanNulls": false,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ },
+ "unit": "percent"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 12,
+ "x": 0,
+ "y": 25
+ },
+ "id": 63,
+ "options": {
+ "legend": {
+ "calcs": [],
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": true
+ },
+ "tooltip": {
+ "mode": "single",
+ "sort": "none"
+ }
+ },
+ "targets": [
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(eth_db_chaindata_db_get_time_total[$__rate_interval]) / 10000000",
+ "legendFormat": "get ({{instance}})",
+ "range": true,
+ "refId": "A"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(eth_db_chaindata_db_put_time_total[$__rate_interval]) / 10000000",
+ "hide": false,
+ "legendFormat": "put ({{instance}})",
+ "range": true,
+ "refId": "B"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(eth_db_chaindata_db_has_time_total[$__rate_interval]) / 10000000",
+ "hide": false,
+ "legendFormat": "has ({{instance}})",
+ "range": true,
+ "refId": "C"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(eth_db_chaindata_db_delete_time_total[$__rate_interval]) / 10000000",
+ "hide": false,
+ "legendFormat": "delete ({{instance}})",
+ "range": true,
+ "refId": "D"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(eth_db_chaindata_db_batch_write_time_total[$__rate_interval]) / 10000000",
+ "hide": false,
+ "legendFormat": "batch_write ({{instance}})",
+ "range": true,
+ "refId": "E"
+ }
+ ],
+ "title": "geth LevelDB Utilization",
+ "transformations": [
+ ],
+ "type": "timeseries"
+ },
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "drawStyle": "line",
+ "fillOpacity": 0,
+ "gradientMode": "none",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "insertNulls": false,
+ "lineInterpolation": "linear",
+ "lineWidth": 1,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "auto",
+ "spanNulls": false,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "min": 0,
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 12,
+ "x": 12,
+ "y": 25
+ },
+ "id": 65,
+ "options": {
+ "legend": {
+ "calcs": [],
+ "displayMode": "list",
+ "placement": "bottom",
+ "showLegend": true
+ },
+ "tooltip": {
+ "mode": "single",
+ "sort": "none"
+ }
+ },
+ "targets": [
+ {
+ "datasource": {
+ "type": "prometheus",
+ "uid": "jZUuGao4k"
+ },
+ "editorMode": "code",
+ "expr": "rate(eth_db_chaindata_db_batch_item_count[$__rate_interval])",
+ "legendFormat": "batch_items ({{instance}})",
+ "range": true,
+ "refId": "A"
+ }
+ ],
+ "title": "geth LevelDB Batch Items Written/sec",
+ "transformations": [
+ ],
+ "type": "timeseries"
+ }
+ ],
+ "refresh": "30s",
+ "schemaVersion": 38,
+ "style": "dark",
+ "tags": [],
+ "templating": {
+ "list": []
+ },
+ "time": {
+ "from": "now-6h",
+ "to": "now"
+ },
+ "timepicker": {},
+ "timezone": "",
+ "title": "eth API Simple",
+ "uid": "b4ba9b2e-965d-4c71-8735-78b6af93156e",
+ "version": 4,
+ "weekStart": ""
+}
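
Note: the panel expressions in this dashboard can be sanity-checked against the Prometheus HTTP API before the dashboard is imported. A quick sketch with curl and jq, assuming the compose-network host name used by the provisioned datasource below, and substituting a literal window for Grafana's $__rate_interval variable:

    # Evaluate the Keycloak logins/min expression as an instant query
    curl -s http://prometheus:9090/api/v1/query \
      --data-urlencode 'query=rate(keycloak_logins[5m]) * 60' \
      | jq '.data.result[] | {client: .metric.client_id, value: .value[1]}'
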
diff --git a/app/data/config/mainnet-eth-metrics/grafana/etc/provisioning/dashboards/dashboards.yml b/app/data/config/mainnet-eth-metrics/grafana/etc/provisioning/dashboards/dashboards.yml
new file mode 100644
index 00000000..4705688e
--- /dev/null
+++ b/app/data/config/mainnet-eth-metrics/grafana/etc/provisioning/dashboards/dashboards.yml
@@ -0,0 +1,9 @@
+apiVersion: 1
+
+providers:
+ - name: dashboards
+ type: file
+ updateIntervalSeconds: 10
+ options:
+ path: /etc/grafana/dashboards
+ foldersFromFilesStructure: true
diff --git a/app/data/config/mainnet-eth-metrics/grafana/etc/provisioning/datasources/prometheus.yml b/app/data/config/mainnet-eth-metrics/grafana/etc/provisioning/datasources/prometheus.yml
new file mode 100644
index 00000000..f5181ebd
--- /dev/null
+++ b/app/data/config/mainnet-eth-metrics/grafana/etc/provisioning/datasources/prometheus.yml
@@ -0,0 +1,19 @@
+apiVersion: 1
+
+datasources:
+ - id: 1
+ uid: jZUuGao4k
+ orgId: 1
+ name: Prometheus
+ type: prometheus
+ typeName: Prometheus
+ typeLogoUrl: public/app/plugins/datasource/prometheus/img/prometheus_logo.svg
+ access: proxy
+ url: http://prometheus:9090
+ user: ""
+ database: ""
+ basicAuth: false
+ isDefault: true
+ jsonData:
+ httpMethod: POST
+ readOnly: false
diff --git a/app/data/config/mainnet-eth-metrics/metrics.env b/app/data/config/mainnet-eth-metrics/metrics.env
new file mode 100644
index 00000000..1cb51f6b
--- /dev/null
+++ b/app/data/config/mainnet-eth-metrics/metrics.env
@@ -0,0 +1,2 @@
+# grafana
+GF_SECURITY_ADMIN_PASSWORD=changeme6325
diff --git a/app/data/config/mainnet-eth-metrics/prometheus/etc/prometheus.yml b/app/data/config/mainnet-eth-metrics/prometheus/etc/prometheus.yml
new file mode 100644
index 00000000..4cc74fc0
--- /dev/null
+++ b/app/data/config/mainnet-eth-metrics/prometheus/etc/prometheus.yml
@@ -0,0 +1,26 @@
+global:
+ scrape_interval: 5s
+ evaluation_interval: 15s
+
+scrape_configs:
+ # geth
+ - job_name: 'geth'
+ metrics_path: /debug/metrics/prometheus
+ scheme: http
+ static_configs:
+ - targets: ['mainnet-eth-geth-1:6060']
+
+ # lighthouse
+ - job_name: 'lighthouse'
+ metrics_path: /metrics
+ scheme: http
+ static_configs:
+ - targets: ['mainnet-eth-lighthouse-1:5054']
+
+ # keycloak
+ - job_name: 'keycloak'
+ scrape_interval: 5s
+ metrics_path: /auth/realms/cerc/metrics
+ scheme: http
+ static_configs:
+ - targets: ['keycloak:8080']
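
Note: once the metrics stack is up, the three scrape jobs can be verified through the Prometheus targets API rather than the UI (again assuming the compose network):

    # List each scrape target and its current health
    curl -s http://prometheus:9090/api/v1/targets \
      | jq -r '.data.activeTargets[] | "\(.labels.job) \(.scrapeUrl) \(.health)"'
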
diff --git a/app/data/config/mainnet-eth/geth.env b/app/data/config/mainnet-eth/geth.env
new file mode 100644
index 00000000..a01444df
--- /dev/null
+++ b/app/data/config/mainnet-eth/geth.env
@@ -0,0 +1,57 @@
+# Enable remote debugging using dlv
+CERC_REMOTE_DEBUG=false
+
+# Enable startup script debug output.
+CERC_SCRIPT_DEBUG=false
+
+# Simple toggle to choose either a 'full' node or an 'archive' node
+# (controls the values of --syncmode --gcmode --snapshot)
+CERC_GETH_MODE_QUICK_SET=full
+
+# Optional custom node name.
+# GETH_NODE_NAME=""
+
+# Specify any other geth CLI options.
+GETH_OPTS=""
+
+# --cache
+GETH_CACHE=1024
+
+# --cache.database
+GETH_CACHE_DB=50
+
+# --cache.gc
+GETH_CACHE_GC=25
+
+# --cache.trie
+GETH_CACHE_TRIE=15
+
+# --datadir
+GETH_DATADIR="/data"
+
+# --http.api
+GETH_HTTP_API="eth,web3,net"
+
+# --authrpc.jwtsecret
+GETH_JWTSECRET="/etc/mainnet-eth/jwtsecret"
+
+# --maxpeers
+GETH_MAX_PEERS=100
+
+# --rpc.evmtimeout
+GETH_RPC_EVMTIMEOUT=0
+
+# --rpc.gascap
+GETH_RPC_GASCAP=0
+
+# --txlookuplimit
+GETH_TXLOOKUPLIMIT=0
+
+# --verbosity
+GETH_VERBOSITY=3
+
+# --log.vmodule
+GETH_VMODULE="rpc/*=4"
+
+# --ws.api
+GETH_WS_API="eth,web3,net"
diff --git a/app/data/config/mainnet-eth/lighthouse.env b/app/data/config/mainnet-eth/lighthouse.env
new file mode 100644
index 00000000..11fc6b69
--- /dev/null
+++ b/app/data/config/mainnet-eth/lighthouse.env
@@ -0,0 +1,33 @@
+# Enable startup script debug output.
+CERC_SCRIPT_DEBUG=false
+
+# Specify any other lighthouse CLI options.
+LIGHTHOUSE_OPTS=""
+
+# Override the advertised public IP (optional)
+# --enr-address
+#LIGHTHOUSE_ENR_ADDRESS=""
+
+# --checkpoint-sync-url
+LIGHTHOUSE_CHECKPOINT_SYNC_URL="https://beaconstate.ethstaker.cc"
+
+# --checkpoint-sync-url-timeout
+LIGHTHOUSE_CHECKPOINT_SYNC_URL_TIMEOUT=300
+
+# --datadir
+LIGHTHOUSE_DATADIR=/data
+
+# --debug-level
+LIGHTHOUSE_DEBUG_LEVEL=info
+
+# --http-port
+LIGHTHOUSE_HTTP_PORT=5052
+
+# --execution-jwt
+LIGHTHOUSE_JWTSECRET=/etc/mainnet-eth/jwtsecret
+
+# --metrics-port
+LIGHTHOUSE_METRICS_PORT=5054
+
+# --port --enr-udp-port --enr-tcp-port
+LIGHTHOUSE_NETWORK_PORT=9000
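
Note: a quick reachability check of the checkpoint sync endpoint before first start can save waiting out the 300s timeout configured above; for example:

    # Expect an HTTP 200 from the checkpointz instance
    curl -s -o /dev/null -w '%{http_code}\n' "$LIGHTHOUSE_CHECKPOINT_SYNC_URL"
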
diff --git a/app/data/config/mainnet-eth/scripts/run-geth.sh b/app/data/config/mainnet-eth/scripts/run-geth.sh
index 3844ef99..7cc5d54b 100755
--- a/app/data/config/mainnet-eth/scripts/run-geth.sh
+++ b/app/data/config/mainnet-eth/scripts/run-geth.sh
@@ -1,12 +1,10 @@
#!/bin/sh
-if [[ -n "$CERC_SCRIPT_DEBUG" ]]; then
+if [[ "true" == "$CERC_SCRIPT_DEBUG" ]]; then
set -x
fi
-CERC_ETH_DATADIR=/root/ethdata
-
START_CMD="geth"
-if [ "true" == "$CERC_REMOTE_DEBUG" ] && [ -x "/usr/local/bin/dlv" ]; then
+if [[ "true" == "$CERC_REMOTE_DEBUG" ]] && [[ -x "/usr/local/bin/dlv" ]]; then
START_CMD="/usr/local/bin/dlv --listen=:40000 --headless=true --api-version=2 --accept-multiclient exec /usr/local/bin/geth --continue --"
fi
@@ -22,29 +20,44 @@ cleanup() {
wait
echo "Done"
}
-
trap 'cleanup' SIGINT SIGTERM
+MODE_FLAGS=""
+if [[ "$CERC_GETH_MODE_QUICK_SET" = "archive" ]]; then
+ MODE_FLAGS="--syncmode=${GETH_SYNC_MODE:-full} --gcmode=${GETH_GC_MODE:-archive} --snapshot=${GETH_SNAPSHOT:-false}"
+else
+ MODE_FLAGS="--syncmode=${GETH_SYNC_MODE:-snap} --gcmode=${GETH_GC_MODE:-full} --snapshot=${GETH_SNAPSHOT:-true}"
+fi
+
$START_CMD \
- --datadir="${CERC_ETH_DATADIR}" \
- --authrpc.addr="0.0.0.0" \
- --authrpc.port 8551 \
- --authrpc.vhosts="*" \
- --authrpc.jwtsecret="/etc/mainnet-eth/jwtsecret" \
- --ws \
- --ws.addr="0.0.0.0" \
- --ws.origins="*" \
- --ws.api="${CERC_GETH_WS_APIS:-eth,web3,net,admin,personal,debug,statediff}" \
- --http.corsdomain="*" \
- --gcmode full \
- --txlookuplimit=0 \
- --cache.preimages \
- --syncmode=snap \
- &
+ $MODE_FLAGS \
+ --datadir="${GETH_DATADIR}"\
+ --identity="${GETH_NODE_NAME}" \
+ --maxpeers=${GETH_MAX_PEERS} \
+ --cache=${GETH_CACHE} \
+ --cache.gc=${GETH_CACHE_GC} \
+ --cache.database=${GETH_CACHE_DB} \
+ --cache.trie=${GETH_CACHE_TRIE} \
+ --authrpc.addr='0.0.0.0' \
+ --authrpc.vhosts='*' \
+ --authrpc.jwtsecret="${GETH_JWTSECRET}" \
+ --http \
+ --http.addr='0.0.0.0' \
+ --http.api="${GETH_HTTP_API}" \
+ --http.vhosts='*' \
+ --metrics \
+ --metrics.addr='0.0.0.0' \
+ --ws \
+ --ws.addr='0.0.0.0' \
+ --ws.api="${GETH_WS_API}" \
+ --rpc.gascap=${GETH_RPC_GASCAP} \
+ --rpc.evmtimeout=${GETH_RPC_EVMTIMEOUT} \
+ --txlookuplimit=${GETH_TXLOOKUPLIMIT} \
+ --verbosity=${GETH_VERBOSITY} \
+ --log.vmodule="${GETH_VMODULE}" \
+ ${GETH_OPTS} &
geth_pid=$!
-
-
wait $geth_pid
if [ "true" == "$CERC_KEEP_RUNNING_AFTER_GETH_EXIT" ]; then
diff --git a/app/data/config/mainnet-eth/scripts/run-lighthouse.sh b/app/data/config/mainnet-eth/scripts/run-lighthouse.sh
index 02c97922..efda735b 100755
--- a/app/data/config/mainnet-eth/scripts/run-lighthouse.sh
+++ b/app/data/config/mainnet-eth/scripts/run-lighthouse.sh
@@ -1,22 +1,30 @@
#!/bin/bash
-if [[ -n "$CERC_SCRIPT_DEBUG" ]]; then
+if [[ "true" == "$CERC_SCRIPT_DEBUG" ]]; then
set -x
fi
-DEBUG_LEVEL=${CERC_LIGHTHOUSE_DEBUG_LEVEL:-info}
+ENR_OPTS=""
+if [[ -n "$LIGHTHOUSE_ENR_ADDRESS" ]]; then
+ ENR_OPTS="--enr-address $LIGHTHOUSE_ENR_ADDRESS"
+fi
-data_dir=/var/lighthouse-data-dir
-
-network_port=9001
-http_port=8001
-authrpc_port=8551
-
-exec lighthouse \
- bn \
- --debug-level $DEBUG_LEVEL \
- --datadir $data_dir \
- --network mainnet \
- --execution-endpoint $EXECUTION_ENDPOINT \
- --execution-jwt /etc/mainnet-eth/jwtsecret \
+exec lighthouse bn \
+ --checkpoint-sync-url "$LIGHTHOUSE_CHECKPOINT_SYNC_URL" \
+ --checkpoint-sync-url-timeout ${LIGHTHOUSE_CHECKPOINT_SYNC_URL_TIMEOUT} \
+ --datadir "$LIGHTHOUSE_DATADIR" \
+ --debug-level $LIGHTHOUSE_DEBUG_LEVEL \
--disable-deposit-contract-sync \
- --checkpoint-sync-url https://beaconstate.ethstaker.cc
+ --disable-upnp \
+ --enr-tcp-port $LIGHTHOUSE_NETWORK_PORT \
+ --enr-udp-port $LIGHTHOUSE_NETWORK_PORT \
+ --execution-endpoint "$LIGHTHOUSE_EXECUTION_ENDPOINT" \
+ --execution-jwt /etc/mainnet-eth/jwtsecret \
+ --http \
+ --http-address 0.0.0.0 \
+ --http-port $LIGHTHOUSE_HTTP_PORT \
+ --metrics \
+ --metrics-address=0.0.0.0 \
+ --metrics-port $LIGHTHOUSE_METRICS_PORT \
+ --network mainnet \
+ --port $LIGHTHOUSE_NETWORK_PORT \
+ $ENR_OPTS $LIGHTHOUSE_OPTS
diff --git a/app/data/config/mainnet-laconicd/create-fixturenet.sh b/app/data/config/mainnet-laconicd/create-fixturenet.sh
deleted file mode 100644
index 9c30bff8..00000000
--- a/app/data/config/mainnet-laconicd/create-fixturenet.sh
+++ /dev/null
@@ -1,118 +0,0 @@
-#!/bin/bash
-
-# TODO: this file is now an unmodified copy of cerc-io/laconicd/init.sh
-# so we should have a mechanism to bundle it inside the container rather than link from here
-# at deploy time.
-
-KEY="mykey"
-CHAINID="laconic_9000-1"
-MONIKER="localtestnet"
-KEYRING="test"
-KEYALGO="eth_secp256k1"
-LOGLEVEL="info"
-# trace evm
-TRACE="--trace"
-# TRACE=""
-
-# validate dependencies are installed
-command -v jq > /dev/null 2>&1 || { echo >&2 "jq not installed. More info: https://stedolan.github.io/jq/download/"; exit 1; }
-
-# remove existing daemon and client
-rm -rf ~/.laconic*
-
-make install
-
-laconicd config keyring-backend $KEYRING
-laconicd config chain-id $CHAINID
-
-# if $KEY exists it should be deleted
-laconicd keys add $KEY --keyring-backend $KEYRING --algo $KEYALGO
-
-# Set moniker and chain-id for Ethermint (Moniker can be anything, chain-id must be an integer)
-laconicd init $MONIKER --chain-id $CHAINID
-
-# Change parameter token denominations to aphoton
-cat $HOME/.laconicd/config/genesis.json | jq '.app_state["staking"]["params"]["bond_denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
-cat $HOME/.laconicd/config/genesis.json | jq '.app_state["crisis"]["constant_fee"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
-cat $HOME/.laconicd/config/genesis.json | jq '.app_state["gov"]["deposit_params"]["min_deposit"][0]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
-cat $HOME/.laconicd/config/genesis.json | jq '.app_state["mint"]["params"]["mint_denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
-# Custom modules
-cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["record_rent"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
-cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_rent"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
-cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_commit_fee"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
-cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_reveal_fee"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
-cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_minimum_bid"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
-
-if [[ "$TEST_REGISTRY_EXPIRY" == "true" ]]; then
- echo "Setting timers for expiry tests."
-
- cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["record_rent_duration"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
- cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_grace_period"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
- cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_rent_duration"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
-fi
-
-if [[ "$TEST_AUCTION_ENABLED" == "true" ]]; then
- echo "Enabling auction and setting timers."
-
- cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_enabled"]=true' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
- cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_rent_duration"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
- cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_grace_period"]="300s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
- cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_commits_duration"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
- cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_reveals_duration"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
-fi
-
-# increase block time (?)
-cat $HOME/.laconicd/config/genesis.json | jq '.consensus_params["block"]["time_iota_ms"]="1000"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
-
-# Set gas limit in genesis
-cat $HOME/.laconicd/config/genesis.json | jq '.consensus_params["block"]["max_gas"]="10000000"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
-
-# disable produce empty block
-if [[ "$OSTYPE" == "darwin"* ]]; then
- sed -i '' 's/create_empty_blocks = true/create_empty_blocks = false/g' $HOME/.laconicd/config/config.toml
- else
- sed -i 's/create_empty_blocks = true/create_empty_blocks = false/g' $HOME/.laconicd/config/config.toml
-fi
-
-if [[ $1 == "pending" ]]; then
- if [[ "$OSTYPE" == "darwin"* ]]; then
- sed -i '' 's/create_empty_blocks_interval = "0s"/create_empty_blocks_interval = "30s"/g' $HOME/.laconicd/config/config.toml
- sed -i '' 's/timeout_propose = "3s"/timeout_propose = "30s"/g' $HOME/.laconicd/config/config.toml
- sed -i '' 's/timeout_propose_delta = "500ms"/timeout_propose_delta = "5s"/g' $HOME/.laconicd/config/config.toml
- sed -i '' 's/timeout_prevote = "1s"/timeout_prevote = "10s"/g' $HOME/.laconicd/config/config.toml
- sed -i '' 's/timeout_prevote_delta = "500ms"/timeout_prevote_delta = "5s"/g' $HOME/.laconicd/config/config.toml
- sed -i '' 's/timeout_precommit = "1s"/timeout_precommit = "10s"/g' $HOME/.laconicd/config/config.toml
- sed -i '' 's/timeout_precommit_delta = "500ms"/timeout_precommit_delta = "5s"/g' $HOME/.laconicd/config/config.toml
- sed -i '' 's/timeout_commit = "5s"/timeout_commit = "150s"/g' $HOME/.laconicd/config/config.toml
- sed -i '' 's/timeout_broadcast_tx_commit = "10s"/timeout_broadcast_tx_commit = "150s"/g' $HOME/.laconicd/config/config.toml
- else
- sed -i 's/create_empty_blocks_interval = "0s"/create_empty_blocks_interval = "30s"/g' $HOME/.laconicd/config/config.toml
- sed -i 's/timeout_propose = "3s"/timeout_propose = "30s"/g' $HOME/.laconicd/config/config.toml
- sed -i 's/timeout_propose_delta = "500ms"/timeout_propose_delta = "5s"/g' $HOME/.laconicd/config/config.toml
- sed -i 's/timeout_prevote = "1s"/timeout_prevote = "10s"/g' $HOME/.laconicd/config/config.toml
- sed -i 's/timeout_prevote_delta = "500ms"/timeout_prevote_delta = "5s"/g' $HOME/.laconicd/config/config.toml
- sed -i 's/timeout_precommit = "1s"/timeout_precommit = "10s"/g' $HOME/.laconicd/config/config.toml
- sed -i 's/timeout_precommit_delta = "500ms"/timeout_precommit_delta = "5s"/g' $HOME/.laconicd/config/config.toml
- sed -i 's/timeout_commit = "5s"/timeout_commit = "150s"/g' $HOME/.laconicd/config/config.toml
- sed -i 's/timeout_broadcast_tx_commit = "10s"/timeout_broadcast_tx_commit = "150s"/g' $HOME/.laconicd/config/config.toml
- fi
-fi
-
-# Allocate genesis accounts (cosmos formatted addresses)
-laconicd add-genesis-account $KEY 100000000000000000000000000aphoton --keyring-backend $KEYRING
-
-# Sign genesis transaction
-laconicd gentx $KEY 1000000000000000000000aphoton --keyring-backend $KEYRING --chain-id $CHAINID
-
-# Collect genesis tx
-laconicd collect-gentxs
-
-# Run this to ensure everything worked and that the genesis file is setup correctly
-laconicd validate-genesis
-
-if [[ $1 == "pending" ]]; then
- echo "pending mode is on, please wait for the first block committed."
-fi
-
-# Start the node (remove the --pruning=nothing flag if historical queries are not needed)
-laconicd start --pruning=nothing --evm.tracer=json $TRACE --log_level $LOGLEVEL --minimum-gas-prices=0.0001aphoton --json-rpc.api eth,txpool,personal,net,debug,web3,miner --api.enable --gql-server --gql-playground
diff --git a/app/data/config/mainnet-laconicd/export-myaddress.sh b/app/data/config/mainnet-laconicd/scripts/export-myaddress.sh
similarity index 100%
rename from app/data/config/mainnet-laconicd/export-myaddress.sh
rename to app/data/config/mainnet-laconicd/scripts/export-myaddress.sh
diff --git a/app/data/config/mainnet-laconicd/export-mykey.sh b/app/data/config/mainnet-laconicd/scripts/export-mykey.sh
similarity index 100%
rename from app/data/config/mainnet-laconicd/export-mykey.sh
rename to app/data/config/mainnet-laconicd/scripts/export-mykey.sh
diff --git a/app/data/config/mainnet-laconicd/scripts/run-laconicd.sh b/app/data/config/mainnet-laconicd/scripts/run-laconicd.sh
new file mode 100755
index 00000000..50a82c69
--- /dev/null
+++ b/app/data/config/mainnet-laconicd/scripts/run-laconicd.sh
@@ -0,0 +1,18 @@
+#!/bin/sh
+if [[ -n "$CERC_SCRIPT_DEBUG" ]]; then
+ set -x
+fi
+
+#TODO: pass these in from the caller
+TRACE="--trace"
+LOGLEVEL="info"
+
+laconicd start \
+ --pruning=nothing \
+ --evm.tracer=json $TRACE \
+ --log_level $LOGLEVEL \
+ --minimum-gas-prices=0.0001aphoton \
+ --json-rpc.api eth,txpool,personal,net,debug,web3,miner \
+ --api.enable \
+ --gql-server \
+ --gql-playground
diff --git a/app/data/config/nitro-contracts/deploy.sh b/app/data/config/nitro-contracts/deploy.sh
new file mode 100755
index 00000000..f4c896f7
--- /dev/null
+++ b/app/data/config/nitro-contracts/deploy.sh
@@ -0,0 +1,57 @@
+#!/bin/bash
+
+set -e
+if [ -n "$CERC_SCRIPT_DEBUG" ]; then
+ set -x
+fi
+
+nitro_addresses_file="/app/deployment/nitro-addresses.json"
+
+# Check if CERC_NA_ADDRESS environment variable set to skip contract deployment
+if [ -n "$CERC_NA_ADDRESS" ]; then
+ echo "CERC_NA_ADDRESS is set to '$CERC_NA_ADDRESS'"
+ echo "CERC_VPA_ADDRESS is set to '$CERC_VPA_ADDRESS'"
+ echo "CERC_CA_ADDRESS is set to '$CERC_CA_ADDRESS'"
+ echo "Skipping Nitro contracts deployment"
+ exit
+fi
+
+# Check and exit if a deployment already exists (on restarts)
+if [ -f ${nitro_addresses_file} ]; then
+ echo "${nitro_addresses_file} already exists, skipping Nitro contracts deployment"
+ cat ${nitro_addresses_file}
+ exit
+fi
+
+echo "Using ETH RPC endpoint ${CERC_ETH_RPC_ENDPOINT}"
+
+# Wait till ETH RPC endpoint is available with block number > 1
+retry_interval=5
+while true; do
+ block_number_hex=$(curl -s -X POST -H "Content-Type: application/json" --data '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}' ${CERC_ETH_RPC_ENDPOINT} | jq -r '.result')
+
+ # Check if the request call was successful
+ if [ $? -ne 0 ]; then
+ echo "RPC endpoint not yet available, retrying in $retry_interval seconds..."
+ sleep $retry_interval
+ continue
+ fi
+
+ # Convert hex to decimal
+ block_number_dec=$(printf %u ${block_number_hex})
+
+ # Check if block number is > 1 to avoid failures in the deployment
+ if [ "$block_number_dec" -ge 1 ]; then
+ echo "RPC endpoint is up"
+ break
+ else
+ echo "RPC endpoint not yet available, retrying in $retry_interval seconds..."
+ sleep $retry_interval
+ continue
+ fi
+done
+
+echo "Using CERC_PRIVATE_KEY_DEPLOYER from env"
+
+yarn test:deploy-contracts --chainurl ${CERC_ETH_RPC_ENDPOINT} --key ${CERC_PRIVATE_KEY_DEPLOYER} --addressesFilePath ${nitro_addresses_file}
+cat ${nitro_addresses_file}
diff --git a/app/data/config/ponder/ponder-start.sh b/app/data/config/ponder/ponder-start.sh
new file mode 100755
index 00000000..5fa5c491
--- /dev/null
+++ b/app/data/config/ponder/ponder-start.sh
@@ -0,0 +1,69 @@
+#!/bin/bash
+
+set -e
+if [ -n "$CERC_SCRIPT_DEBUG" ]; then
+ set -x
+fi
+
+# Wait till RPC endpoint is available
+retry_interval=5
+while true; do
+ rpc_response=$(curl -s -o /dev/null -w '%{http_code}' ${PONDER_RPC_URL_1})
+ if [ ${rpc_response} = 200 ]; then
+ echo "RPC endpoint is available"
+ break
+ fi
+
+ echo "RPC endpoint not yet available, retrying in $retry_interval seconds..."
+ sleep $retry_interval
+done
+
+nitro_addresses_file="/nitro/nitro-addresses.json"
+nitro_addresses_destination_file="/app/examples/token-erc20/nitro-addresses.json"
+
+# Check if CERC_NA_ADDRESS environment variable is set
+if [ -n "$CERC_NA_ADDRESS" ]; then
+ echo "CERC_NA_ADDRESS is set to '$CERC_NA_ADDRESS'"
+ echo "CERC_VPA_ADDRESS is set to '$CERC_VPA_ADDRESS'"
+ echo "CERC_CA_ADDRESS is set to '$CERC_CA_ADDRESS'"
+ echo "Using the above Nitro addresses"
+
+ # Create the required JSON and write it to a file
+ nitro_addresses_json=$(jq -n \
+ --arg na "$CERC_NA_ADDRESS" \
+ --arg vpa "$CERC_VPA_ADDRESS" \
+ --arg ca "$CERC_CA_ADDRESS" \
+ '.nitroAdjudicatorAddress = $na | .virtualPaymentAppAddress = $vpa | .consensusAppAddress = $ca')
+ echo "$nitro_addresses_json" > "${nitro_addresses_destination_file}"
+elif [ -f ${nitro_addresses_file} ]; then
+ echo "Using Nitro addresses from ${nitro_addresses_file}:"
+ cat "$nitro_addresses_file"
+ cat "$nitro_addresses_file" > "$nitro_addresses_destination_file"
+else
+ echo "Nitro addresses not available"
+ exit 1
+fi
+
+echo "Using CERC_PONDER_NITRO_PK from env for Nitro account"
+echo "Using CERC_PONDER_NITRO_CHAIN_PK (account with funds) from env for sending Nitro txs"
+echo "Using ${CERC_PONDER_NITRO_CHAIN_URL} as the RPC endpoint for Nitro txs"
+
+# If not set, check the mounted volume for relay peer id
+if [ -z "$CERC_RELAY_MULTIADDR" ]; then
+ echo "CERC_RELAY_MULTIADDR not provided, taking from the mounted volume"
+ CERC_RELAY_MULTIADDR="/dns4/mobymask-watcher-server/tcp/9090/ws/p2p/$(jq -r '.id' /peers/relay-id.json)"
+fi
+
+env_file='.env.local'
+echo "PONDER_CHAIN_ID=\"$PONDER_CHAIN_ID\"" > "$env_file"
+echo "PONDER_RPC_URL_1=\"$PONDER_RPC_URL_1\"" >> "$env_file"
+echo "CERC_PONDER_NITRO_PK=\"$CERC_PONDER_NITRO_PK\"" >> "$env_file"
+echo "CERC_PONDER_NITRO_CHAIN_PK=\"$CERC_PONDER_NITRO_CHAIN_PK\"" >> "$env_file"
+echo "CERC_PONDER_NITRO_CHAIN_URL=\"$CERC_PONDER_NITRO_CHAIN_URL\"" >> "$env_file"
+echo "CERC_RELAY_MULTIADDR=\"$CERC_RELAY_MULTIADDR\"" >> "$env_file"
+echo "CERC_UPSTREAM_NITRO_ADDRESS=\"$CERC_UPSTREAM_NITRO_ADDRESS\"" >> "$env_file"
+echo "CERC_UPSTREAM_NITRO_MULTIADDR=\"$CERC_UPSTREAM_NITRO_MULTIADDR\"" >> "$env_file"
+echo "CERC_UPSTREAM_NITRO_PAY_AMOUNT=\"$CERC_UPSTREAM_NITRO_PAY_AMOUNT\"" >> "$env_file"
+
+# Keep the container running
+tail -f
diff --git a/app/data/config/ponder/ponder.config.ts b/app/data/config/ponder/ponder.config.ts
new file mode 100644
index 00000000..c3a80830
--- /dev/null
+++ b/app/data/config/ponder/ponder.config.ts
@@ -0,0 +1,37 @@
+import type { Config } from "@ponder/core";
+
+import contractAddresses from "./nitro-addresses.json";
+
+export const config: Config = {
+ networks: [
+ {
+ name: "fixturenet",
+ chainId: Number(process.env.PONDER_CHAIN_ID),
+ rpcUrl: process.env.PONDER_RPC_URL_1,
+ maxRpcRequestConcurrency: 1,
+ },
+ ],
+ contracts: [
+ {
+ name: "AdventureGold",
+ network: "fixturenet",
+ abi: "./abis/AdventureGold.json",
+ address: "0x32353A6C91143bfd6C7d363B546e62a9A2489A20",
+ startBlock: 5,
+ maxBlockRange: 100,
+ },
+ ],
+ nitro: {
+ privateKey: process.env.CERC_PONDER_NITRO_PK!,
+ chainPrivateKey: process.env.CERC_PONDER_NITRO_CHAIN_PK!,
+ chainURL: process.env.CERC_PONDER_NITRO_CHAIN_URL!,
+ contractAddresses,
+ relayMultiAddr: process.env.CERC_RELAY_MULTIADDR!,
+ store: "./.ponder/nitro-db",
+ rpcNitroNode: {
+ address: process.env.CERC_UPSTREAM_NITRO_ADDRESS!,
+ multiAddr: process.env.CERC_UPSTREAM_NITRO_MULTIADDR!,
+ },
+ payAmount: process.env.CERC_UPSTREAM_NITRO_PAY_AMOUNT!,
+ },
+};
diff --git a/app/data/config/sushiswap-subgraph-v3/filecoin.js b/app/data/config/sushiswap-subgraph-v3/filecoin.js
new file mode 100644
index 00000000..00dc005a
--- /dev/null
+++ b/app/data/config/sushiswap-subgraph-v3/filecoin.js
@@ -0,0 +1,30 @@
+const NATIVE_ADDRESS = '0x60e1773636cf5e4a227d9ac24f20feca034ee25a'
+const USDC_ADDRESS = '0xeb466342c4d449bc9f53a865d5cb90586f405215'
+const DAI_ADDRESS = '0x5c7e299cf531eb66f2a1df637d37abb78e6200c7'
+
+module.exports = {
+ network: 'filecoin',
+ blocks: {
+ address: '0x719e14fcb364bb05649bd525eb6c4a2d0d4ea2b7',
+ startBlock: 2867000,
+ },
+ v3: {
+ factory: { // 0xb4fbf271143f4fbf7b91a5ded31805e42b2208d6
+ address: '0xc35dadb65012ec5796536bd9864ed8773abc74c4',
+ startBlock: 2867560,
+ },
+ positionManager: {
+ address: '0xf4d73326c13a4fc5fd7a064217e12780e9bd62c3',
+ startBlock: 2868037,
+ },
+ native: { address: NATIVE_ADDRESS },
+ whitelistedTokenAddresses: [
+ NATIVE_ADDRESS,
+ USDC_ADDRESS,
+ DAI_ADDRESS,
+ ],
+ stableTokenAddresses: [USDC_ADDRESS, DAI_ADDRESS],
+ nativePricePool: '0x1d1375281265e4dd496d90455f7c82f4fbd85cc2',
+ minimumEthLocked: 250
+ },
+}
diff --git a/app/data/config/sushiswap-subgraph-v3/run-blocks.sh b/app/data/config/sushiswap-subgraph-v3/run-blocks.sh
new file mode 100755
index 00000000..72af062a
--- /dev/null
+++ b/app/data/config/sushiswap-subgraph-v3/run-blocks.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+set -e
+
+echo "Building blocks subgraph and deploying to graph-node..."
+
+cd blocks
+
+pnpm run generate
+pnpm run build
+
+pnpm exec graph create --node http://graph-node:8020/ sushiswap/blocks
+pnpm exec graph deploy --node http://graph-node:8020/ --ipfs http://ipfs:5001 --version-label 0.1.0 sushiswap/blocks
+
+echo "Done"
diff --git a/app/data/config/sushiswap-subgraph-v3/run-v3.sh b/app/data/config/sushiswap-subgraph-v3/run-v3.sh
index 8ca01460..2c34a028 100755
--- a/app/data/config/sushiswap-subgraph-v3/run-v3.sh
+++ b/app/data/config/sushiswap-subgraph-v3/run-v3.sh
@@ -2,38 +2,14 @@
set -e
-# Loop until the NFPM deployment is detected
-echo "Waiting for sushiswap-periphery deployments to occur"
-while [ ! -f ./deployments/docker/NonfungiblePositionManager.json ]; do
- sleep 5
-done
+echo "Building v3 subgraph and deploying to graph-node..."
-echo "Reading contract addresses and block numbers from deployments"
-FACTORY_ADDRESS=$(jq -r '.address' ./core-deployments/docker/UniswapV3Factory.json)
-FACTORY_BLOCK=$(jq -r '.receipt.blockNumber' ./core-deployments/docker/UniswapV3Factory.json)
-NATIVE_ADDRESS=$(jq -r '.address' ./deployments/docker/WFIL.json)
-NFPM_ADDRESS=$(jq -r '.address' ./deployments/docker/NonfungiblePositionManager.json)
-NFPM_BLOCK=$(jq -r '.receipt.blockNumber' ./deployments/docker/NonfungiblePositionManager.json)
+cd v3
-# Read the JavaScript file content
-file_content=$( /app/config/lotus-fixturenet.js
-
-
-echo "Building subgraph and deploying to graph-node..."
pnpm run generate
pnpm run build
-pnpm exec graph create --node http://graph-node:8020/ sushiswap/v3-lotus
-pnpm exec graph deploy --node http://graph-node:8020/ --ipfs http://ipfs:5001 --version-label 0.1.0 sushiswap/v3-lotus
+
+pnpm exec graph create --node http://graph-node:8020/ sushiswap/v3-filecoin
+pnpm exec graph deploy --node http://graph-node:8020/ --ipfs http://ipfs:5001 --version-label 0.1.0 sushiswap/v3-filecoin
echo "Done"
diff --git a/app/data/config/watcher-mobymask-v2/mobymask-app-start.sh b/app/data/config/watcher-mobymask-v2/mobymask-app-start.sh
index d6f0d452..3623d0ba 100755
--- a/app/data/config/watcher-mobymask-v2/mobymask-app-start.sh
+++ b/app/data/config/watcher-mobymask-v2/mobymask-app-start.sh
@@ -8,6 +8,7 @@ CERC_CHAIN_ID="${CERC_CHAIN_ID:-${DEFAULT_CERC_CHAIN_ID}}"
CERC_DEPLOYED_CONTRACT="${CERC_DEPLOYED_CONTRACT:-${DEFAULT_CERC_DEPLOYED_CONTRACT}}"
CERC_RELAY_NODES="${CERC_RELAY_NODES:-${DEFAULT_CERC_RELAY_NODES}}"
CERC_DENY_MULTIADDRS="${CERC_DENY_MULTIADDRS:-${DEFAULT_CERC_DENY_MULTIADDRS}}"
+CERC_PUBSUB="${CERC_PUBSUB:-${DEFAULT_CERC_PUBSUB}}"
CERC_APP_WATCHER_URL="${CERC_APP_WATCHER_URL:-${DEFAULT_CERC_APP_WATCHER_URL}}"
# If not set (or []), check the mounted volume for relay peer id
@@ -41,7 +42,8 @@ jq --arg address "$CERC_DEPLOYED_CONTRACT" \
--argjson chainId "$CERC_CHAIN_ID" \
--argjson relayNodes "$CERC_RELAY_NODES" \
--argjson denyMultiaddrs "$CERC_DENY_MULTIADDRS" \
- '.address = $address | .chainId = $chainId | .relayNodes = $relayNodes | .peer.denyMultiaddrs = $denyMultiaddrs' \
+ --arg pubsub "$CERC_PUBSUB" \
+ '.address = $address | .chainId = $chainId | .relayNodes = $relayNodes | .peer.denyMultiaddrs = $denyMultiaddrs | .peer.pubsub = $pubsub' \
/app/src/mobymask-app-config.json > /app/${CERC_CONFIG_FILE}
if [ "${CERC_USE_NPM}" = "true" ]; then
diff --git a/app/data/config/watcher-mobymask-v2/mobymask-params.env b/app/data/config/watcher-mobymask-v2/mobymask-params.env
index 5e4d9fb6..2cf65c7e 100644
--- a/app/data/config/watcher-mobymask-v2/mobymask-params.env
+++ b/app/data/config/watcher-mobymask-v2/mobymask-params.env
@@ -27,3 +27,6 @@ DEFAULT_CERC_RELAY_NODES=[]
# Set of multiaddrs to be avoided while dialling
DEFAULT_CERC_DENY_MULTIADDRS=[]
+
+# Type of pubsub to be used
+DEFAULT_CERC_PUBSUB=""
diff --git a/app/data/config/watcher-mobymask-v2/start-server.sh b/app/data/config/watcher-mobymask-v2/start-server.sh
index eab8bac2..110265bc 100755
--- a/app/data/config/watcher-mobymask-v2/start-server.sh
+++ b/app/data/config/watcher-mobymask-v2/start-server.sh
@@ -9,6 +9,7 @@ CERC_L1_ACCOUNTS_CSV_URL="${CERC_L1_ACCOUNTS_CSV_URL:-${DEFAULT_CERC_L1_ACCOUNTS
CERC_RELAY_PEERS="${CERC_RELAY_PEERS:-${DEFAULT_CERC_RELAY_PEERS}}"
CERC_DENY_MULTIADDRS="${CERC_DENY_MULTIADDRS:-${DEFAULT_CERC_DENY_MULTIADDRS}}"
+CERC_PUBSUB="${CERC_PUBSUB:-${DEFAULT_CERC_PUBSUB}}"
CERC_RELAY_ANNOUNCE_DOMAIN="${CERC_RELAY_ANNOUNCE_DOMAIN:-${DEFAULT_CERC_RELAY_ANNOUNCE_DOMAIN}}"
CERC_ENABLE_PEER_L2_TXS="${CERC_ENABLE_PEER_L2_TXS:-${DEFAULT_CERC_ENABLE_PEER_L2_TXS}}"
CERC_DEPLOYED_CONTRACT="${CERC_DEPLOYED_CONTRACT:-${DEFAULT_CERC_DEPLOYED_CONTRACT}}"
@@ -50,6 +51,7 @@ WATCHER_CONFIG_TEMPLATE=$(cat environments/watcher-config-template.toml)
WATCHER_CONFIG=$(echo "$WATCHER_CONFIG_TEMPLATE" | \
sed -E "s|REPLACE_WITH_CERC_RELAY_PEERS|${CERC_RELAY_PEERS}|g; \
s|REPLACE_WITH_CERC_DENY_MULTIADDRS|${CERC_DENY_MULTIADDRS}|g; \
+ s/REPLACE_WITH_CERC_PUBSUB/${CERC_PUBSUB}/g; \
s/REPLACE_WITH_CERC_RELAY_ANNOUNCE_DOMAIN/${CERC_RELAY_ANNOUNCE_DOMAIN}/g; \
s|REPLACE_WITH_CERC_RELAY_MULTIADDR|${CERC_RELAY_MULTIADDR}|g; \
s/REPLACE_WITH_CERC_ENABLE_PEER_L2_TXS/${CERC_ENABLE_PEER_L2_TXS}/g; \
diff --git a/app/data/config/watcher-mobymask-v2/watcher-config-template.toml b/app/data/config/watcher-mobymask-v2/watcher-config-template.toml
index 5a2c7ce4..a0dd3b34 100644
--- a/app/data/config/watcher-mobymask-v2/watcher-config-template.toml
+++ b/app/data/config/watcher-mobymask-v2/watcher-config-template.toml
@@ -30,6 +30,7 @@
denyMultiaddrs = REPLACE_WITH_CERC_DENY_MULTIADDRS
peerIdFile = './peers/relay-id.json'
announce = 'REPLACE_WITH_CERC_RELAY_ANNOUNCE_DOMAIN'
+ pubsub = 'REPLACE_WITH_CERC_PUBSUB'
enableDebugInfo = true
[server.p2p.peer]
@@ -37,6 +38,7 @@
pubSubTopic = 'mobymask'
denyMultiaddrs = REPLACE_WITH_CERC_DENY_MULTIADDRS
peerIdFile = './peers/peer-id.json'
+ pubsub = 'REPLACE_WITH_CERC_PUBSUB'
enableDebugInfo = true
enableL2Txs = REPLACE_WITH_CERC_ENABLE_PEER_L2_TXS
diff --git a/app/data/config/watcher-mobymask-v3/deploy-and-generate-invite.sh b/app/data/config/watcher-mobymask-v3/deploy-and-generate-invite.sh
new file mode 100755
index 00000000..7ac5fd9a
--- /dev/null
+++ b/app/data/config/watcher-mobymask-v3/deploy-and-generate-invite.sh
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+set -e
+if [ -n "$CERC_SCRIPT_DEBUG" ]; then
+ set -x
+fi
+
+CERC_ETH_RPC_ENDPOINT="${CERC_ETH_RPC_ENDPOINT:-${DEFAULT_CERC_ETH_RPC_ENDPOINT}}"
+CERC_MOBYMASK_APP_BASE_URI="${CERC_MOBYMASK_APP_BASE_URI:-${DEFAULT_CERC_MOBYMASK_APP_BASE_URI}}"
+CERC_DEPLOYED_CONTRACT="${CERC_DEPLOYED_CONTRACT:-${DEFAULT_CERC_DEPLOYED_CONTRACT}}"
+
+# Check if CERC_DEPLOYED_CONTRACT environment variable set to skip contract deployment
+if [ -n "$CERC_DEPLOYED_CONTRACT" ]; then
+ echo "CERC_DEPLOYED_CONTRACT is set to '$CERC_DEPLOYED_CONTRACT'"
+ echo "Skipping contract deployment"
+ exit 0
+fi
+
+echo "Using ETH RPC endpoint ${CERC_ETH_RPC_ENDPOINT}"
+
+# Wait till ETH RPC endpoint is available with block number > 1
+retry_interval=5
+while true; do
+ block_number_hex=$(curl -s -X POST -H "Content-Type: application/json" --data '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}' ${CERC_ETH_RPC_ENDPOINT} | jq -r '.result')
+
+ # Check if the request call was successful
+ if [ $? -ne 0 ]; then
+ echo "RPC endpoint not yet available, retrying in $retry_interval seconds..."
+ sleep $retry_interval
+ continue
+ fi
+
+ # Convert hex to decimal
+ block_number_dec=$(printf %u ${block_number_hex})
+
+ # Check if block number is > 1 to avoid failures in the deployment
+ if [ "$block_number_dec" -ge 1 ]; then
+ echo "RPC endpoint is up"
+ break
+ else
+ echo "RPC endpoint not yet available, retrying in $retry_interval seconds..."
+ sleep $retry_interval
+ continue
+ fi
+done
+
+echo "Using CERC_PRIVATE_KEY_DEPLOYER from env"
+
+# Create the required JSON and write it to a file
+secrets_file="secrets.json"
+secrets_json=$(jq -n \
+ --arg privateKey "$CERC_PRIVATE_KEY_DEPLOYER" \
+ --arg rpcUrl "$CERC_ETH_RPC_ENDPOINT" \
+ --arg baseURI "$CERC_MOBYMASK_APP_BASE_URI" \
+ '.privateKey = $privateKey | .rpcUrl = $rpcUrl | .baseURI = $baseURI')
+echo "$secrets_json" > "${secrets_file}"
+
+export RPC_URL="${CERC_ETH_RPC_ENDPOINT}"
+
+# Check and exit if a deployment already exists (on restarts)
+if [ -f ./config.json ]; then
+ echo "config.json already exists, checking the contract deployment"
+
+ # Read JSON file
+ deployment_details=$(cat config.json)
+ deployed_contract=$(echo "$deployment_details" | jq -r '.address')
+
+ cd ../hardhat
+ if yarn verifyDeployment --network optimism --contract "${deployed_contract}"; then
+ echo "Deployment verfication successful"
+ cd ../server
+ else
+ echo "Deployment verfication failed, please clear MobyMask deployment volume before starting"
+ exit 1
+ fi
+fi
+
+npm run deployAndGenerateInvite
diff --git a/app/data/config/watcher-mobymask-v3/keys/12D3KooWAMjBkFCT9DtCnSDcxftxJzSuTBvzVojabv64cnEvX4AZ.json b/app/data/config/watcher-mobymask-v3/keys/12D3KooWAMjBkFCT9DtCnSDcxftxJzSuTBvzVojabv64cnEvX4AZ.json
new file mode 100644
index 00000000..84465346
--- /dev/null
+++ b/app/data/config/watcher-mobymask-v3/keys/12D3KooWAMjBkFCT9DtCnSDcxftxJzSuTBvzVojabv64cnEvX4AZ.json
@@ -0,0 +1,11 @@
+{
+ "peerId": {
+ "id": "12D3KooWAMjBkFCT9DtCnSDcxftxJzSuTBvzVojabv64cnEvX4AZ",
+ "privKey": "CAESQAKCrnY0QKTky1I18fqn+VPydXGUv1NYiV+nVKqBFkw/CAjE9sKKIDGnYAo8mivnI6dngFenERY+0Q8AJrPTaXY=",
+ "pubKey": "CAESIAgIxPbCiiAxp2AKPJor5yOnZ4BXpxEWPtEPACaz02l2"
+ },
+ "consensus": {
+ "publicKey": "02cd17b05ca998955be5ca7bf4fd4531243d438f1aae7ce8a0ed5159f53cee5b40",
+ "privateKey": "67d80505614bdf61fca11cbad31d93acb2c7df1c653dc25975d77d05f05f154f"
+ }
+}
diff --git a/app/data/config/watcher-mobymask-v3/keys/12D3KooWBNEbY3QS4y23ngupDw9PDc4bvNvRJGVRejjV9EZLjux5.json b/app/data/config/watcher-mobymask-v3/keys/12D3KooWBNEbY3QS4y23ngupDw9PDc4bvNvRJGVRejjV9EZLjux5.json
new file mode 100644
index 00000000..8eea1872
--- /dev/null
+++ b/app/data/config/watcher-mobymask-v3/keys/12D3KooWBNEbY3QS4y23ngupDw9PDc4bvNvRJGVRejjV9EZLjux5.json
@@ -0,0 +1,11 @@
+{
+ "peerId": {
+ "id": "12D3KooWBNEbY3QS4y23ngupDw9PDc4bvNvRJGVRejjV9EZLjux5",
+ "privKey": "CAESQGSTw0ymvn8+wX9Dbvyr4/Gib1q2voe0CC0VyeClMQP6FwW14x0fpRbBIx0XhLdxWHkRndphVg3gVAHyC+7ZI8o=",
+ "pubKey": "CAESIBcFteMdH6UWwSMdF4S3cVh5EZ3aYVYN4FQB8gvu2SPK"
+ },
+ "consensus": {
+ "publicKey": "029c8035b3e9401b8f178f7c37285b5cb22501e017340e2058b3b842f2a1f0ae45",
+ "privateKey": "0261008e8e3ec808168e99333599da38ca59a056a2ae4510a6ad3d8b5cb0918c"
+ }
+}
diff --git a/app/data/config/watcher-mobymask-v3/keys/12D3KooWSRH6ftgkAZsKZK7UX1Zr6Hx6YAsEepHqzopFszqfTxxi.json b/app/data/config/watcher-mobymask-v3/keys/12D3KooWSRH6ftgkAZsKZK7UX1Zr6Hx6YAsEepHqzopFszqfTxxi.json
new file mode 100644
index 00000000..80721faa
--- /dev/null
+++ b/app/data/config/watcher-mobymask-v3/keys/12D3KooWSRH6ftgkAZsKZK7UX1Zr6Hx6YAsEepHqzopFszqfTxxi.json
@@ -0,0 +1,11 @@
+{
+ "peerId": {
+ "id": "12D3KooWSRH6ftgkAZsKZK7UX1Zr6Hx6YAsEepHqzopFszqfTxxi",
+ "privKey": "CAESQHBjlHxfVhZ2gXsBItrIEEgSGKcjMkFiGs3PPz9E3ace9qyWEkvR4oit5ve9SAROVoh20hoa42IC91NIafMaqws=",
+ "pubKey": "CAESIPaslhJL0eKIreb3vUgETlaIdtIaGuNiAvdTSGnzGqsL"
+ },
+ "consensus": {
+ "publicKey": "039160c244a7ad8be16a64bdb69e6dbacdcfe20b37076792a0d06032a8097468ca",
+ "privateKey": "8894685fe81001d75662b079905472699373967451d1255ee5fc669d0a09a9ca"
+ }
+}
diff --git a/app/data/config/watcher-mobymask-v3/mobymask-app-start.sh b/app/data/config/watcher-mobymask-v3/mobymask-app-start.sh
new file mode 100644
index 00000000..73990965
--- /dev/null
+++ b/app/data/config/watcher-mobymask-v3/mobymask-app-start.sh
@@ -0,0 +1,86 @@
+#!/bin/bash
+
+set -e
+if [ -n "$CERC_SCRIPT_DEBUG" ]; then
+ set -x
+fi
+
+CERC_CHAIN_ID="${CERC_CHAIN_ID:-${DEFAULT_CERC_CHAIN_ID}}"
+CERC_DEPLOYED_CONTRACT="${CERC_DEPLOYED_CONTRACT:-${DEFAULT_CERC_DEPLOYED_CONTRACT}}"
+CERC_RELAY_NODES="${CERC_RELAY_NODES:-${DEFAULT_CERC_RELAY_NODES}}"
+CERC_DENY_MULTIADDRS="${CERC_DENY_MULTIADDRS:-${DEFAULT_CERC_DENY_MULTIADDRS}}"
+CERC_PUBSUB="${CERC_PUBSUB:-${DEFAULT_CERC_PUBSUB}}"
+CERC_GOSSIPSUB_DIRECT_PEERS="${CERC_GOSSIPSUB_DIRECT_PEERS:-${DEFAULT_CERC_GOSSIPSUB_DIRECT_PEERS}}"
+CERC_APP_WATCHER_URL="${CERC_APP_WATCHER_URL:-${DEFAULT_CERC_APP_WATCHER_URL}}"
+CERC_SNAP_URL="${CERC_SNAP_URL:-${DEFAULT_CERC_SNAP_URL}}"
+
+# If not set (or []), check the mounted volume for relay peer id
+if [ -z "$CERC_RELAY_NODES" ] || [ "$CERC_RELAY_NODES" = "[]" ]; then
+ echo "CERC_RELAY_NODES not provided, taking from the mounted volume"
+ CERC_RELAY_NODES="[\"/ip4/127.0.0.1/tcp/9090/ws/p2p/$(jq -r '.id' /peers/relay-id.json)\"]"
+fi
+
+echo "Using CERC_RELAY_NODES $CERC_RELAY_NODES"
+
+if [ -z "$CERC_DEPLOYED_CONTRACT" ]; then
+ # Use config from mounted volume (when running web-app along with watcher stack)
+ echo "Taking config for deployed contract from mounted volume"
+ while [ ! -f /server/config.json ]; do
+ echo "Config not found, retrying in 5 seconds..."
+ sleep 5
+ done
+
+ # Get deployed contract address and chain id
+ CERC_DEPLOYED_CONTRACT=$(jq -r '.address' /server/config.json | tr -d '"')
+ CERC_CHAIN_ID=$(jq -r '.chainId' /server/config.json)
+else
+ echo "Using CERC_DEPLOYED_CONTRACT ${CERC_DEPLOYED_CONTRACT} from env as the MobyMask contract address"
+fi
+
+nitro_addresses_file="/nitro/nitro-addresses.json"
+nitro_addresses_destination_file="/app/src/utils/nitro-addresses.json"
+
+# Check if CERC_NA_ADDRESS environment variable is set
+if [ -n "$CERC_NA_ADDRESS" ]; then
+ echo "CERC_NA_ADDRESS is set to '$CERC_NA_ADDRESS'"
+ echo "CERC_VPA_ADDRESS is set to '$CERC_VPA_ADDRESS'"
+ echo "CERC_CA_ADDRESS is set to '$CERC_CA_ADDRESS'"
+ echo "Using the above Nitro addresses"
+
+ # Create the required JSON and write it to a file
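+ # Resulting shape: { "nitroAdjudicatorAddress": $na, "virtualPaymentAppAddress": $vpa, "consensusAppAddress": $ca }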
+ nitro_addresses_json=$(jq -n \
+ --arg na "$CERC_NA_ADDRESS" \
+ --arg vpa "$CERC_VPA_ADDRESS" \
+ --arg ca "$CERC_CA_ADDRESS" \
+ '.nitroAdjudicatorAddress = $na | .virtualPaymentAppAddress = $vpa | .consensusAppAddress = $ca')
+ echo "$nitro_addresses_json" > "${nitro_addresses_destination_file}"
+elif [ -f "${nitro_addresses_file}" ]; then
+ echo "Using Nitro addresses from ${nitro_addresses_file}:"
+ cat "$nitro_addresses_file"
+ cat "$nitro_addresses_file" > "$nitro_addresses_destination_file"
+else
+ echo "Nitro addresses not available"
+ exit 1
+fi
+
+# Export config values in a json file
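+# Note: --argjson preserves booleans, numbers and arrays as JSON values, while --arg passes plain strings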
+app_config_file="/app/src/utils/config.json"
+app_config_json=$(jq -n \
+ --arg name "MobyMask" \
+ --argjson enableDebugInfo true \
+ --arg address "$CERC_DEPLOYED_CONTRACT" \
+ --argjson chainId "$CERC_CHAIN_ID" \
+ --argjson relayNodes "$CERC_RELAY_NODES" \
+ --argjson denyMultiaddrs "$CERC_DENY_MULTIADDRS" \
+ --arg pubsub "$CERC_PUBSUB" \
+ --argjson directPeers "$CERC_GOSSIPSUB_DIRECT_PEERS" \
+ '.name = $name | .address = $address | .chainId = $chainId | .relayNodes = $relayNodes | .peer.enableDebugInfo = $enableDebugInfo | .peer.denyMultiaddrs = $denyMultiaddrs | .peer.pubsub = $pubsub | .peer.directPeers = $directPeers')
+echo "$app_config_json" > "${app_config_file}"
+
+REACT_APP_DEBUG_PEER=true \
+REACT_APP_WATCHER_URI="$CERC_APP_WATCHER_URL/graphql" \
+REACT_APP_PAY_TO_NITRO_ADDRESS="$CERC_PAYMENT_NITRO_ADDRESS" \
+REACT_APP_SNAP_ORIGIN="local:$CERC_SNAP_URL" \
+yarn build
+
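+# Serve the production build on port 80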
+http-server -p 80 /app/build
diff --git a/app/data/config/watcher-mobymask-v3/mobymask-params.env b/app/data/config/watcher-mobymask-v3/mobymask-params.env
new file mode 100644
index 00000000..f96aae31
--- /dev/null
+++ b/app/data/config/watcher-mobymask-v3/mobymask-params.env
@@ -0,0 +1,50 @@
+# Defaults
+
+# ETH RPC endpoint used for contract(s) deployment
+DEFAULT_CERC_ETH_RPC_ENDPOINT="http://fixturenet-eth-geth-1:8545"
+
+# ETH RPC endpoint used for queries in the watcher
+DEFAULT_CERC_ETH_RPC_QUERY_ENDPOINT="http://nitro-reverse-payment-proxy:8081"
+
+# ETH RPC endpoint used for mutations in the watcher
+DEFAULT_CERC_ETH_RPC_MUTATION_ENDPOINT="http://fixturenet-eth-geth-1:8545"
+
+# Set of relay peers to connect to from the relay node
+DEFAULT_CERC_RELAY_PEERS=[]
+
+# Domain to be used in the relay node's announce address
+DEFAULT_CERC_RELAY_ANNOUNCE_DOMAIN=
+
+# Base URI for mobymask-app (used for generating invite links)
+DEFAULT_CERC_MOBYMASK_APP_BASE_URI="http://127.0.0.1:3004/#"
+
+# Set to false to disable the watcher peer from sending txs to L2
+DEFAULT_CERC_ENABLE_PEER_L2_TXS=true
+
+# Set the deployed MobyMask contract address to skip contract deployment in the stack
+# mobymask-app will use this contract address in its config if run separately
+DEFAULT_CERC_DEPLOYED_CONTRACT=
+
+# Chain ID used by the mobymask web-app for txs
+DEFAULT_CERC_CHAIN_ID=1212
+
+# Watcher endpoint used by the web-app
+DEFAULT_CERC_APP_WATCHER_URL="http://localhost:3001"
+
+# MobyMask snap URL to be used by the web-app
+DEFAULT_CERC_SNAP_URL=http://localhost:8080
+
+# Set of relay nodes to be used by web-apps
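+# Format: a JSON array of multiaddrs, e.g. ["/ip4/127.0.0.1/tcp/9090/ws/p2p/<relay-peer-id>"]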
+DEFAULT_CERC_RELAY_NODES=[]
+
+# Set of multiaddrs to be avoided while dialling
+DEFAULT_CERC_DENY_MULTIADDRS=[]
+
+# Type of pubsub to be used
+DEFAULT_CERC_PUBSUB=""
+
+# Set of direct peers to be used when pubsub is set to gossipsub
+DEFAULT_CERC_GOSSIPSUB_DIRECT_PEERS=[]
+
+# Whether to enable payments to upstream ETH server
+DEFAULT_CERC_ENABLE_UPSTREAM_PAYMENTS=true
diff --git a/app/data/config/watcher-mobymask-v3/start-server.sh b/app/data/config/watcher-mobymask-v3/start-server.sh
new file mode 100755
index 00000000..b3546e81
--- /dev/null
+++ b/app/data/config/watcher-mobymask-v3/start-server.sh
@@ -0,0 +1,163 @@
+#!/bin/bash
+
+set -e
+if [ -n "$CERC_SCRIPT_DEBUG" ]; then
+ set -x
+fi
+
+CERC_ETH_RPC_QUERY_ENDPOINT="${CERC_ETH_RPC_QUERY_ENDPOINT:-${DEFAULT_CERC_ETH_RPC_QUERY_ENDPOINT}}"
+CERC_ETH_RPC_MUTATION_ENDPOINT="${CERC_ETH_RPC_MUTATION_ENDPOINT:-${DEFAULT_CERC_ETH_RPC_MUTATION_ENDPOINT}}"
+CERC_RELAY_PEERS="${CERC_RELAY_PEERS:-${DEFAULT_CERC_RELAY_PEERS}}"
+CERC_DENY_MULTIADDRS="${CERC_DENY_MULTIADDRS:-${DEFAULT_CERC_DENY_MULTIADDRS}}"
+CERC_PUBSUB="${CERC_PUBSUB:-${DEFAULT_CERC_PUBSUB}}"
+CERC_RELAY_ANNOUNCE_DOMAIN="${CERC_RELAY_ANNOUNCE_DOMAIN:-${DEFAULT_CERC_RELAY_ANNOUNCE_DOMAIN}}"
+CERC_ENABLE_PEER_L2_TXS="${CERC_ENABLE_PEER_L2_TXS:-${DEFAULT_CERC_ENABLE_PEER_L2_TXS}}"
+CERC_DEPLOYED_CONTRACT="${CERC_DEPLOYED_CONTRACT:-${DEFAULT_CERC_DEPLOYED_CONTRACT}}"
+CERC_ENABLE_UPSTREAM_PAYMENTS="${CERC_ENABLE_UPSTREAM_PAYMENTS:-${DEFAULT_CERC_ENABLE_UPSTREAM_PAYMENTS}}"
+
+watcher_keys_dir="./keys"
+
+echo "Using RPC query endpoint ${CERC_ETH_RPC_QUERY_ENDPOINT}"
+echo "Using RPC mutation endpoint ${CERC_ETH_RPC_MUTATION_ENDPOINT}"
+
+# Use public domain for relay multiaddr in peer config if specified
+# Otherwise, use the watcher server's Docker service hostname
+if [ -n "$CERC_RELAY_ANNOUNCE_DOMAIN" ]; then
+ CERC_RELAY_MULTIADDR="/dns4/${CERC_RELAY_ANNOUNCE_DOMAIN}/tcp/443/wss/p2p/$(jq -r '.id' /app/peers/relay-id.json)"
+else
+ CERC_RELAY_MULTIADDR="/dns4/mobymask-watcher-server/tcp/9090/ws/p2p/$(jq -r '.id' /app/peers/relay-id.json)"
+fi
+
+# Use the contract address from the environment variable, or read it from config.json in the mounted volume
+if [ -n "$CERC_DEPLOYED_CONTRACT" ]; then
+ CONTRACT_ADDRESS="${CERC_DEPLOYED_CONTRACT}"
+else
+ # Assign deployed contract address from server config (created by mobymask container after deploying contract)
+ CONTRACT_ADDRESS=$(jq -r '.address' /server/config.json | tr -d '"')
+fi
+
+nitro_addresses_file="/nitro/nitro-addresses.json"
+nitro_addresses_destination_file="./src/nitro-addresses.json"
+
+# Check if CERC_NA_ADDRESS environment variable is set
+if [ -n "$CERC_NA_ADDRESS" ]; then
+ echo "CERC_NA_ADDRESS is set to '$CERC_NA_ADDRESS'"
+ echo "CERC_VPA_ADDRESS is set to '$CERC_VPA_ADDRESS'"
+ echo "CERC_CA_ADDRESS is set to '$CERC_CA_ADDRESS'"
+ echo "Using the above Nitro addresses"
+
+ # Create the required JSON and write it to a file
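+ # Resulting shape: { "nitroAdjudicatorAddress": $na, "virtualPaymentAppAddress": $vpa, "consensusAppAddress": $ca }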
+ nitro_addresses_json=$(jq -n \
+ --arg na "$CERC_NA_ADDRESS" \
+ --arg vpa "$CERC_VPA_ADDRESS" \
+ --arg ca "$CERC_CA_ADDRESS" \
+ '.nitroAdjudicatorAddress = $na | .virtualPaymentAppAddress = $vpa | .consensusAppAddress = $ca')
+ echo "$nitro_addresses_json" > "${nitro_addresses_destination_file}"
+elif [ -f "${nitro_addresses_file}" ]; then
+ echo "Using Nitro addresses from ${nitro_addresses_file}:"
+ cat "$nitro_addresses_file"
+ cat "$nitro_addresses_file" > "$nitro_addresses_destination_file"
+else
+ echo "Nitro addresses not available"
+ exit 1
+fi
+
+# Build after setting the Nitro addresses
+yarn build
+
+echo "Using CERC_PRIVATE_KEY_PEER (account with funds) from env for sending txs to L2"
+echo "Using CERC_WATCHER_NITRO_PK from env for Nitro account"
+
+if [ -n "$CERC_PEER_ID" ]; then
+ echo "Using CERC_PEER_ID ${CERC_PEER_ID} from env for watcher fixture"
+ echo "Consensus module enabled"
+
+ # Set corresponding variables
+ PEER_ID_FILE='./peer-id.json'
+ CONSENSUS_ENABLED=true
+ WATCHER_PARTY_PEERS_FILE='./watcher-party-peers.json'
+
+ # Create watcher party array
+ watcher_parties=()
+
+ # Iterate over each fixture JSON file
+ for peer_data_file in "$watcher_keys_dir"/*.json; do
+ # Extract the filename without the path and extension
+ peer_id=$(basename "$peer_data_file" .json)
+
+ # Read the consensus keys
+ consensus_public_key=$(jq -r '.consensus.publicKey' "$peer_data_file")
+ consensus_private_key=$(jq -r '.consensus.privateKey' "$peer_data_file")
+
+ # Append watcher party
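+ # Each entry has the shape { "peerId": ..., "publicKey": ... }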
+ watcher_party=$(jq -n \
+ --arg peerId "$peer_id" \
+ --arg publicKey "$consensus_public_key" \
+ '.peerId = $peerId | .publicKey = $publicKey')
+ watcher_parties+=("$watcher_party")
+
+ if [ "$peer_id" = "$CERC_PEER_ID" ]; then
+ # Export peer id
+ peer_id_data=$(jq '.peerId' "$peer_data_file")
+ echo "$peer_id_data" > "${PEER_ID_FILE}"
+
+ # Set consensus keys for this peer
+ CONSENSUS_PUBLIC_KEY=${consensus_public_key}
+ CONSENSUS_PRIVATE_KEY=${consensus_private_key}
+ fi
+ done
+
+ # Export watcher party file
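+ # jq -s (slurp) collects the newline-separated JSON objects into a single array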
+ watcher_parties_json=$(printf '%s\n' "${watcher_parties[@]}" | jq -s .)
+ echo "$watcher_parties_json" > "${WATCHER_PARTY_PEERS_FILE}"
+ echo "Watcher party peers exported to ${WATCHER_PARTY_PEERS_FILE}"
+else
+ echo "Using generated peer id"
+ echo "Consensus module disabled"
+
+ # Set corresponding variables
+ PEER_ID_FILE='./peers/peer-id.json'
+ CONSENSUS_ENABLED=false
+ WATCHER_PARTY_PEERS_FILE=''
+ CONSENSUS_PUBLIC_KEY=''
+ CONSENSUS_PRIVATE_KEY=''
+fi
+
+if [ "$CERC_ENABLE_UPSTREAM_PAYMENTS" = true ]; then
+ UPSTREAM_NITRO_ADDRESS=${CERC_UPSTREAM_NITRO_ADDRESS}
+ UPSTREAM_NITRO_MULTIADDR=${CERC_UPSTREAM_NITRO_MULTIADDR}
+ UPSTREAM_NITRO_PAY_AMOUNT=${CERC_UPSTREAM_NITRO_PAY_AMOUNT}
+else
+ UPSTREAM_NITRO_ADDRESS=""
+ UPSTREAM_NITRO_MULTIADDR=""
+ UPSTREAM_NITRO_PAY_AMOUNT=""
+fi
+
+# Read in the config template TOML file and modify it
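+# Note: substitutions whose values can contain '/' (multiaddrs, URLs, file paths) use '|' as the sed delimiter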
+WATCHER_CONFIG_TEMPLATE=$(cat environments/watcher-config-template.toml)
+WATCHER_CONFIG=$(echo "$WATCHER_CONFIG_TEMPLATE" | \
+ sed -E "s|REPLACE_WITH_CERC_RELAY_PEERS|${CERC_RELAY_PEERS}|g; \
+ s|REPLACE_WITH_CERC_DENY_MULTIADDRS|${CERC_DENY_MULTIADDRS}|g; \
+ s/REPLACE_WITH_CERC_PUBSUB/${CERC_PUBSUB}/g; \
+ s/REPLACE_WITH_CERC_RELAY_ANNOUNCE_DOMAIN/${CERC_RELAY_ANNOUNCE_DOMAIN}/g; \
+ s|REPLACE_WITH_CERC_RELAY_MULTIADDR|${CERC_RELAY_MULTIADDR}|g; \
+ s|REPLACE_WITH_PEER_ID_FILE|${PEER_ID_FILE}|g; \
+ s/REPLACE_WITH_CERC_ENABLE_PEER_L2_TXS/${CERC_ENABLE_PEER_L2_TXS}/g; \
+ s/REPLACE_WITH_CERC_PRIVATE_KEY_PEER/${CERC_PRIVATE_KEY_PEER}/g; \
+ s/REPLACE_WITH_CERC_WATCHER_NITRO_PK/${CERC_WATCHER_NITRO_PK}/g; \
+ s/REPLACE_WITH_CONTRACT_ADDRESS/${CONTRACT_ADDRESS}/g; \
+ s/REPLACE_WITH_CONSENSUS_ENABLED/${CONSENSUS_ENABLED}/g; \
+ s/REPLACE_WITH_CONSENSUS_PUBLIC_KEY/${CONSENSUS_PUBLIC_KEY}/g; \
+ s/REPLACE_WITH_CONSENSUS_PRIVATE_KEY/${CONSENSUS_PRIVATE_KEY}/g; \
+ s|REPLACE_WITH_WATCHER_PARTY_PEERS_FILE|${WATCHER_PARTY_PEERS_FILE}|g; \
+ s|REPLACE_WITH_CERC_ETH_RPC_QUERY_ENDPOINT|${CERC_ETH_RPC_QUERY_ENDPOINT}|g; \
+ s|REPLACE_WITH_CERC_ETH_RPC_MUTATION_ENDPOINT|${CERC_ETH_RPC_MUTATION_ENDPOINT}|g; \
+ s/REPLACE_WITH_UPSTREAM_NITRO_ADDRESS/${UPSTREAM_NITRO_ADDRESS}/g; \
+ s|REPLACE_WITH_UPSTREAM_NITRO_MULTIADDR|${UPSTREAM_NITRO_MULTIADDR}|g; \
+ s/REPLACE_WITH_UPSTREAM_NITRO_PAY_AMOUNT/${UPSTREAM_NITRO_PAY_AMOUNT}/ ")
+
+# Write the modified content to a new file
+echo "$WATCHER_CONFIG" > environments/local.toml
+
+echo 'yarn server'
+yarn server
diff --git a/app/data/config/watcher-mobymask-v3/watcher-config-rates.toml b/app/data/config/watcher-mobymask-v3/watcher-config-rates.toml
new file mode 100644
index 00000000..ebb8f8a1
--- /dev/null
+++ b/app/data/config/watcher-mobymask-v3/watcher-config-rates.toml
@@ -0,0 +1,14 @@
+freeQueriesLimit = 10
+
+freeQueriesList = []
+
+[queries]
+ multiNonce = '50'
+ _owner = '50'
+ isRevoked = '50'
+ isPhisher = '50'
+ isMember = '50'
+
+[mutations]
+ invoke = '100'
+ revoke = '100'
diff --git a/app/data/config/watcher-mobymask-v3/watcher-config-template.toml b/app/data/config/watcher-mobymask-v3/watcher-config-template.toml
new file mode 100644
index 00000000..e2b5d1aa
--- /dev/null
+++ b/app/data/config/watcher-mobymask-v3/watcher-config-template.toml
@@ -0,0 +1,115 @@
+[server]
+ host = "0.0.0.0"
+ port = 3001
+ kind = "lazy"
+
+ # Checkpointing state.
+ checkpointing = true
+
+ # Checkpoint interval in number of blocks.
+ checkpointInterval = 2000
+
+ # Enable state creation
+ enableState = true
+
+ # Boolean to filter logs by contract.
+ filterLogs = true
+
+ # Max block range for which to return events in eventsInRange GQL query.
+ # Use -1 to skip the block range check.
+ maxEventsBlockRange = -1
+
+ # Flag to specify whether the RPC endpoint supports block hash as a block tag parameter
+ rpcSupportsBlockHashParam = true
+
+ [server.p2p]
+ enableRelay = true
+ enablePeer = true
+
+ [server.p2p.relay]
+ host = "0.0.0.0"
+ port = 9090
+ relayPeers = REPLACE_WITH_CERC_RELAY_PEERS
+ denyMultiaddrs = REPLACE_WITH_CERC_DENY_MULTIADDRS
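+ # relayPeers and denyMultiaddrs are left unquoted because the start script substitutes JSON arrays verbatim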
+ peerIdFile = './peers/relay-id.json'
+ announce = 'REPLACE_WITH_CERC_RELAY_ANNOUNCE_DOMAIN'
+ pubsub = 'REPLACE_WITH_CERC_PUBSUB'
+ enableDebugInfo = true
+
+ [server.p2p.peer]
+ relayMultiaddr = 'REPLACE_WITH_CERC_RELAY_MULTIADDR'
+ pubSubTopic = 'mobymask'
+ denyMultiaddrs = REPLACE_WITH_CERC_DENY_MULTIADDRS
+ peerIdFile = 'REPLACE_WITH_PEER_ID_FILE'
+ pubsub = 'REPLACE_WITH_CERC_PUBSUB'
+ enableDebugInfo = true
+ enableL2Txs = REPLACE_WITH_CERC_ENABLE_PEER_L2_TXS
+ pingInterval = 4000
+ pingTimeout = 1500
+ maxRelayConnections = 10
+
+ [server.p2p.peer.l2TxsConfig]
+ privateKey = 'REPLACE_WITH_CERC_PRIVATE_KEY_PEER'
+ contractAddress = 'REPLACE_WITH_CONTRACT_ADDRESS'
+
+ [server.p2p.nitro]
+ store = './out/nitro-db'
+ privateKey = 'REPLACE_WITH_CERC_WATCHER_NITRO_PK'
+ chainPrivateKey = 'REPLACE_WITH_CERC_PRIVATE_KEY_PEER'
+
+ [server.p2p.nitro.payments]
+ ratesFile = './environments/rates.toml'
+ requestTimeoutInSecs = 10
+
+ [server.p2p.nitro.payments.cache]
+ maxAccounts = 1000
+ accountTTLInSecs = 1800
+ maxVouchersPerAccount = 1000
+ voucherTTLInSecs = 300
+ maxPaymentChannels = 10000
+ paymentChannelTTLInSecs = 1800
+
+ [server.p2p.consensus]
+ enabled = REPLACE_WITH_CONSENSUS_ENABLED
+ publicKey = 'REPLACE_WITH_CONSENSUS_PUBLIC_KEY'
+ privateKey = 'REPLACE_WITH_CONSENSUS_PRIVATE_KEY'
+ watcherPartyPeersFile = 'REPLACE_WITH_WATCHER_PARTY_PEERS_FILE'
+
+[metrics]
+ host = "0.0.0.0"
+ port = 9000
+ [metrics.gql]
+ port = 9001
+
+[database]
+ type = "postgres"
+ host = "mobymask-watcher-db"
+ port = 5432
+ database = "mobymask-watcher"
+ username = "vdbm"
+ password = "password"
+ synchronize = true
+ logging = false
+
+[upstream]
+ [upstream.ethServer]
+ gqlApiEndpoint = 'http://ipld-eth-server:8083/graphql'
+ rpcProviderEndpoint = 'REPLACE_WITH_CERC_ETH_RPC_QUERY_ENDPOINT'
+ rpcProviderMutationEndpoint = 'REPLACE_WITH_CERC_ETH_RPC_MUTATION_ENDPOINT'
+
+ [upstream.ethServer.rpcProviderNitroNode]
+ address = 'REPLACE_WITH_UPSTREAM_NITRO_ADDRESS'
+ multiAddr = 'REPLACE_WITH_UPSTREAM_NITRO_MULTIADDR'
+ amount = 'REPLACE_WITH_UPSTREAM_NITRO_PAY_AMOUNT'
+
+ [upstream.cache]
+ name = "requests"
+ enabled = false
+ deleteOnStart = false
+
+[jobQueue]
+ dbConnectionString = "postgres://vdbm:password@mobymask-watcher-db/mobymask-watcher-job-queue"
+ maxCompletionLagInSecs = 300
+ jobDelayInMilliSecs = 100
+ eventsInBatch = 50
+ blockDelayInMilliSecs = 60000
diff --git a/app/data/config/watcher-mobymask/mobymask-watcher-db.sql b/app/data/config/watcher-mobymask/mobymask-watcher-db.sql
index 5b116b79..c77542bd 100644
--- a/app/data/config/watcher-mobymask/mobymask-watcher-db.sql
+++ b/app/data/config/watcher-mobymask/mobymask-watcher-db.sql
@@ -2,8 +2,8 @@
-- PostgreSQL database dump
--
--- Dumped from database version 12.11
--- Dumped by pg_dump version 14.3 (Ubuntu 14.3-0ubuntu0.22.04.1)
+-- Dumped from database version 14.8
+-- Dumped by pg_dump version 14.8
SET statement_timeout = 0;
SET lock_timeout = 0;
@@ -17,10 +17,10 @@ SET client_min_messages = warning;
SET row_security = off;
--
--- Name: ipld_block_kind_enum; Type: TYPE; Schema: public; Owner: vdbm
+-- Name: state_kind_enum; Type: TYPE; Schema: public; Owner: vdbm
--
-CREATE TYPE public.ipld_block_kind_enum AS ENUM (
+CREATE TYPE public.state_kind_enum AS ENUM (
'diff',
'init',
'diff_staged',
@@ -28,7 +28,7 @@ CREATE TYPE public.ipld_block_kind_enum AS ENUM (
);
-ALTER TYPE public.ipld_block_kind_enum OWNER TO vdbm;
+ALTER TYPE public.state_kind_enum OWNER TO vdbm;
SET default_tablespace = '';
@@ -153,44 +153,6 @@ ALTER TABLE public.contract_id_seq OWNER TO vdbm;
ALTER SEQUENCE public.contract_id_seq OWNED BY public.contract.id;
---
--- Name: domain_hash; Type: TABLE; Schema: public; Owner: vdbm
---
-
-CREATE TABLE public.domain_hash (
- id integer NOT NULL,
- block_hash character varying(66) NOT NULL,
- block_number integer NOT NULL,
- contract_address character varying(42) NOT NULL,
- value character varying NOT NULL,
- proof text
-);
-
-
-ALTER TABLE public.domain_hash OWNER TO vdbm;
-
---
--- Name: domain_hash_id_seq; Type: SEQUENCE; Schema: public; Owner: vdbm
---
-
-CREATE SEQUENCE public.domain_hash_id_seq
- AS integer
- START WITH 1
- INCREMENT BY 1
- NO MINVALUE
- NO MAXVALUE
- CACHE 1;
-
-
-ALTER TABLE public.domain_hash_id_seq OWNER TO vdbm;
-
---
--- Name: domain_hash_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: vdbm
---
-
-ALTER SEQUENCE public.domain_hash_id_seq OWNED BY public.domain_hash.id;
-
-
--
-- Name: event; Type: TABLE; Schema: public; Owner: vdbm
--
@@ -232,80 +194,6 @@ ALTER TABLE public.event_id_seq OWNER TO vdbm;
ALTER SEQUENCE public.event_id_seq OWNED BY public.event.id;
---
--- Name: ipld_block; Type: TABLE; Schema: public; Owner: vdbm
---
-
-CREATE TABLE public.ipld_block (
- id integer NOT NULL,
- contract_address character varying(42) NOT NULL,
- cid character varying NOT NULL,
- kind public.ipld_block_kind_enum NOT NULL,
- data bytea NOT NULL,
- block_id integer
-);
-
-
-ALTER TABLE public.ipld_block OWNER TO vdbm;
-
---
--- Name: ipld_block_id_seq; Type: SEQUENCE; Schema: public; Owner: vdbm
---
-
-CREATE SEQUENCE public.ipld_block_id_seq
- AS integer
- START WITH 1
- INCREMENT BY 1
- NO MINVALUE
- NO MAXVALUE
- CACHE 1;
-
-
-ALTER TABLE public.ipld_block_id_seq OWNER TO vdbm;
-
---
--- Name: ipld_block_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: vdbm
---
-
-ALTER SEQUENCE public.ipld_block_id_seq OWNED BY public.ipld_block.id;
-
-
---
--- Name: ipld_status; Type: TABLE; Schema: public; Owner: vdbm
---
-
-CREATE TABLE public.ipld_status (
- id integer NOT NULL,
- latest_hooks_block_number integer NOT NULL,
- latest_checkpoint_block_number integer NOT NULL,
- latest_ipfs_block_number integer NOT NULL
-);
-
-
-ALTER TABLE public.ipld_status OWNER TO vdbm;
-
---
--- Name: ipld_status_id_seq; Type: SEQUENCE; Schema: public; Owner: vdbm
---
-
-CREATE SEQUENCE public.ipld_status_id_seq
- AS integer
- START WITH 1
- INCREMENT BY 1
- NO MINVALUE
- NO MAXVALUE
- CACHE 1;
-
-
-ALTER TABLE public.ipld_status_id_seq OWNER TO vdbm;
-
---
--- Name: ipld_status_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: vdbm
---
-
-ALTER SEQUENCE public.ipld_status_id_seq OWNED BY public.ipld_status.id;
-
-
--
-- Name: is_member; Type: TABLE; Schema: public; Owner: vdbm
--
@@ -463,6 +351,79 @@ ALTER TABLE public.multi_nonce_id_seq OWNER TO vdbm;
ALTER SEQUENCE public.multi_nonce_id_seq OWNED BY public.multi_nonce.id;
+--
+-- Name: state; Type: TABLE; Schema: public; Owner: vdbm
+--
+
+CREATE TABLE public.state (
+ id integer NOT NULL,
+ contract_address character varying(42) NOT NULL,
+ cid character varying NOT NULL,
+ kind public.state_kind_enum NOT NULL,
+ data bytea NOT NULL,
+ block_id integer
+);
+
+
+ALTER TABLE public.state OWNER TO vdbm;
+
+--
+-- Name: state_id_seq; Type: SEQUENCE; Schema: public; Owner: vdbm
+--
+
+CREATE SEQUENCE public.state_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.state_id_seq OWNER TO vdbm;
+
+--
+-- Name: state_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: vdbm
+--
+
+ALTER SEQUENCE public.state_id_seq OWNED BY public.state.id;
+
+
+--
+-- Name: state_sync_status; Type: TABLE; Schema: public; Owner: vdbm
+--
+
+CREATE TABLE public.state_sync_status (
+ id integer NOT NULL,
+ latest_indexed_block_number integer NOT NULL,
+ latest_checkpoint_block_number integer
+);
+
+
+ALTER TABLE public.state_sync_status OWNER TO vdbm;
+
+--
+-- Name: state_sync_status_id_seq; Type: SEQUENCE; Schema: public; Owner: vdbm
+--
+
+CREATE SEQUENCE public.state_sync_status_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.state_sync_status_id_seq OWNER TO vdbm;
+
+--
+-- Name: state_sync_status_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: vdbm
+--
+
+ALTER SEQUENCE public.state_sync_status_id_seq OWNED BY public.state_sync_status.id;
+
+
--
-- Name: sync_status; Type: TABLE; Schema: public; Owner: vdbm
--
@@ -504,6 +465,22 @@ ALTER TABLE public.sync_status_id_seq OWNER TO vdbm;
ALTER SEQUENCE public.sync_status_id_seq OWNED BY public.sync_status.id;
+--
+-- Name: typeorm_metadata; Type: TABLE; Schema: public; Owner: vdbm
+--
+
+CREATE TABLE public.typeorm_metadata (
+ type character varying NOT NULL,
+ database character varying,
+ schema character varying,
+ "table" character varying,
+ name character varying,
+ value text
+);
+
+
+ALTER TABLE public.typeorm_metadata OWNER TO vdbm;
+
--
-- Name: _owner id; Type: DEFAULT; Schema: public; Owner: vdbm
--
@@ -525,13 +502,6 @@ ALTER TABLE ONLY public.block_progress ALTER COLUMN id SET DEFAULT nextval('publ
ALTER TABLE ONLY public.contract ALTER COLUMN id SET DEFAULT nextval('public.contract_id_seq'::regclass);
---
--- Name: domain_hash id; Type: DEFAULT; Schema: public; Owner: vdbm
---
-
-ALTER TABLE ONLY public.domain_hash ALTER COLUMN id SET DEFAULT nextval('public.domain_hash_id_seq'::regclass);
-
-
--
-- Name: event id; Type: DEFAULT; Schema: public; Owner: vdbm
--
@@ -539,20 +509,6 @@ ALTER TABLE ONLY public.domain_hash ALTER COLUMN id SET DEFAULT nextval('public.
ALTER TABLE ONLY public.event ALTER COLUMN id SET DEFAULT nextval('public.event_id_seq'::regclass);
---
--- Name: ipld_block id; Type: DEFAULT; Schema: public; Owner: vdbm
---
-
-ALTER TABLE ONLY public.ipld_block ALTER COLUMN id SET DEFAULT nextval('public.ipld_block_id_seq'::regclass);
-
-
---
--- Name: ipld_status id; Type: DEFAULT; Schema: public; Owner: vdbm
---
-
-ALTER TABLE ONLY public.ipld_status ALTER COLUMN id SET DEFAULT nextval('public.ipld_status_id_seq'::regclass);
-
-
--
-- Name: is_member id; Type: DEFAULT; Schema: public; Owner: vdbm
--
@@ -581,6 +537,20 @@ ALTER TABLE ONLY public.is_revoked ALTER COLUMN id SET DEFAULT nextval('public.i
ALTER TABLE ONLY public.multi_nonce ALTER COLUMN id SET DEFAULT nextval('public.multi_nonce_id_seq'::regclass);
+--
+-- Name: state id; Type: DEFAULT; Schema: public; Owner: vdbm
+--
+
+ALTER TABLE ONLY public.state ALTER COLUMN id SET DEFAULT nextval('public.state_id_seq'::regclass);
+
+
+--
+-- Name: state_sync_status id; Type: DEFAULT; Schema: public; Owner: vdbm
+--
+
+ALTER TABLE ONLY public.state_sync_status ALTER COLUMN id SET DEFAULT nextval('public.state_sync_status_id_seq'::regclass);
+
+
--
-- Name: sync_status id; Type: DEFAULT; Schema: public; Owner: vdbm
--
@@ -601,12 +571,7 @@ COPY public._owner (id, block_hash, block_number, contract_address, value, proof
--
COPY public.block_progress (id, cid, block_hash, parent_hash, block_number, block_timestamp, num_events, num_processed_events, last_processed_event_index, is_complete, is_pruned, created_at) FROM stdin;
-1 bagiacgzahk6aqbbp75hft2xvtqnj425qaxj7ze4fspykcs745cyxg34bb3ba 0x3abc08042fff4e59eaf59c1a9e6bb005d3fc938593f0a14bfce8b1736f810ec2 0xafbdc83ac2dc79b5500c67751472eeac76594e4466c367b5f4a2895cd175ed97 14869713 1653872939 1 1 77 t f 2022-07-18 12:34:00.523
-5 bagiacgzav62hayc73buzkf24foyh5vrnt54ndxv76m6of7dprvpqpkpl5sra 0xafb470605fd86995175c2bb07ed62d9f78d1debff33ce2fc6f8d5f07a9ebeca2 0x33283f0fa7702e8c366715738c1d34c9750edd9cf74ae5dfb8d11f262ad69027 14885755 1654099778 2 2 119 t f 2022-07-18 12:34:42.361
-2 bagiacgzafdfrnz2azvox32djx3rjk7tuij4q5hlxjzxhdackm6jty7tcqa4a 0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038 0xabd4915ed36022a05a9d95f51dc702103a2caab4c2f161321ab12a6bb77f01d1 14875233 1653950619 8 8 440 t f 2022-07-18 12:34:09.416
-3 bagiacgzan6rpxee4tm4gmzgcer3yx4enpvodtpzn2t2bjj72cblkhrng5bxa 0x6fa2fb909c9b386664c224778bf08d7d5c39bf2dd4f414a7fa1056a3c5a6e86e 0x976a8cb34b85994bce2fa5bda884f2a7c8ad68050645cb2dba5519e59cba013d 14876405 1653966919 4 4 274 t f 2022-07-18 12:34:19.014
-4 bagiacgzabrcmklsd5c3egq2hlrypg7opagtvuysqaf5r2q7nue2stozixbaa 0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840 0xe48d7477413de216d3f7f4868b472047b82c8738890d7096f6c0e8398e92e39e 14884873 1654087572 12 12 518 t f 2022-07-18 12:34:33.681
-6 bagiacgzad4pz3x2ugxppkduwmvr2ncx4gavr2q5r5limcwr3gol2c7cff24q 0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9 0xbb8016b536b4f4e8ee93c614d74485a7d7eca814b49132599a932cfd03e324a2 15234194 1659054431 12 12 236 t f 2022-07-29 10:37:48.236
+1 bagiacgzauxdi4c475drog7xk4tejff6gfjuizi7wwyi5zpi7zywluz6qjgta 0xa5c68e0b9fe8e2e37eeae4c89297c62a688ca3f6b611dcbd1fce2cba67d049a6 0x4be849db46f69accfd7c435011eac58ba368508cf965bb1a6a188480e6f0e8eb 17960760 1692592607 1 1 130 t f 2023-08-29 16:48:14.226
\.
@@ -615,15 +580,7 @@ COPY public.block_progress (id, cid, block_hash, parent_hash, block_number, bloc
--
COPY public.contract (id, address, kind, checkpoint, starting_block) FROM stdin;
-1 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 PhisherRegistry t 14869713
-\.
-
-
---
--- Data for Name: domain_hash; Type: TABLE DATA; Schema: public; Owner: vdbm
---
-
-COPY public.domain_hash (id, block_hash, block_number, contract_address, value, proof) FROM stdin;
+1 0xD07Ed0eB708Cb7A660D22f2Ddf7b8C19c7bf1F69 PhisherRegistry t 1
\.
@@ -632,67 +589,7 @@ COPY public.domain_hash (id, block_hash, block_number, contract_address, value,
--
COPY public.event (id, tx_hash, index, contract, event_name, event_info, extra_info, proof, block_id) FROM stdin;
-1 0x82f33cec81da44e94ef69924bc7d786d3f7856f06c1ef583d266dd1b7f091b82 77 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 OwnershipTransferred {"previousOwner":"0x0000000000000000000000000000000000000000","newOwner":"0xDdb18b319BE3530560eECFF962032dFAD88212d4"} {"topics":["0x8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e0","0x0000000000000000000000000000000000000000000000000000000000000000","0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4"],"data":"0x","tx":{"cid":"bagjqcgzaqlztz3eb3jcostxwtesly7lynu7xqvxqnqppla6sm3orw7yjdoba","txHash":"0x82f33cec81da44e94ef69924bc7d786d3f7856f06c1ef583d266dd1b7f091b82","index":38,"src":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","dst":"","__typename":"EthTransactionCid"},"eventSignature":"OwnershipTransferred(address,address)"} {"data":"{\\"blockHash\\":\\"0x3abc08042fff4e59eaf59c1a9e6bb005d3fc938593f0a14bfce8b1736f810ec2\\",\\"receiptCID\\":\\"bagkacgzappvknoiwyepymknt7dbcfh3jlejpscm3frdd66dwvkvmfwuuuota\\",\\"log\\":{\\"cid\\":\\"bagmqcgzak5xa5kdm3sjuvm3un77ll7oz2degukktjargydrj4fayhimdfo3a\\",\\"ipldBlock\\":\\"0xf882822080b87df87b94b06e6db9288324738f04fcaac910f5a60102c1f8f863a08be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e0a00000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d480\\"}}"} 1
-2 0x98998f7446c63178ea77d1b184cdfe781b9cbcff27100f03806f482b354f1be9 433 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0xDdb18b319BE3530560eECFF962032dFAD88212d4"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzatcmy65cgyyyxr2tx2gyyjtp6panzzph7e4ia6a4an5ecwnkpdpuq","txHash":"0x98998f7446c63178ea77d1b184cdfe781b9cbcff27100f03806f482b354f1be9","index":136,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038\\",\\"receiptCID\\":\\"bagkacgza7njxwiac6p4vxcmw5gnyxs32bum5jeq6k3j7xxyzaqm7gcrw6hwa\\",\\"log\\":{\\"cid\\":\\"bagmqcgzaz22koutltuxcphbuc72dcdt6xuqr2e3mk4w75xksg2zzqaynbmoa\\",\\"ipldBlock\\":\\"0xf87f30b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 2
-3 0x98998f7446c63178ea77d1b184cdfe781b9cbcff27100f03806f482b354f1be9 434 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 MemberStatusUpdated {"entity":"0xdd77c46f6a736e44f19d33c56378a607fe3868a8c1a0866951beab5c9abc9aab","isMember":true} {"topics":["0x88e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2","0xdd77c46f6a736e44f19d33c56378a607fe3868a8c1a0866951beab5c9abc9aab"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzatcmy65cgyyyxr2tx2gyyjtp6panzzph7e4ia6a4an5ecwnkpdpuq","txHash":"0x98998f7446c63178ea77d1b184cdfe781b9cbcff27100f03806f482b354f1be9","index":136,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"MemberStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038\\",\\"receiptCID\\":\\"bagkacgza7njxwiac6p4vxcmw5gnyxs32bum5jeq6k3j7xxyzaqm7gcrw6hwa\\",\\"log\\":{\\"cid\\":\\"bagmqcgzaflsnlinnufdz4ipp7vhrvg4gggvptx7ringzkwjfsrkw5bstou7a\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a088e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2a0dd77c46f6a736e44f19d33c56378a607fe3868a8c1a0866951beab5c9abc9aaba00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 2
-4 0x98998f7446c63178ea77d1b184cdfe781b9cbcff27100f03806f482b354f1be9 435 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0xDdb18b319BE3530560eECFF962032dFAD88212d4"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzatcmy65cgyyyxr2tx2gyyjtp6panzzph7e4ia6a4an5ecwnkpdpuq","txHash":"0x98998f7446c63178ea77d1b184cdfe781b9cbcff27100f03806f482b354f1be9","index":136,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038\\",\\"receiptCID\\":\\"bagkacgza7njxwiac6p4vxcmw5gnyxs32bum5jeq6k3j7xxyzaqm7gcrw6hwa\\",\\"log\\":{\\"cid\\":\\"bagmqcgzanj72wfbfvqby3dvz3jnh5nwstmvl3nlm6kxrkgfio7z643s2qesq\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 2
-5 0x98998f7446c63178ea77d1b184cdfe781b9cbcff27100f03806f482b354f1be9 436 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 MemberStatusUpdated {"entity":"0x501b05f326e247749a9ee05e173a4b32508afcf85ec6dbb26a6cbb2a4f2e8671","isMember":true} {"topics":["0x88e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2","0x501b05f326e247749a9ee05e173a4b32508afcf85ec6dbb26a6cbb2a4f2e8671"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzatcmy65cgyyyxr2tx2gyyjtp6panzzph7e4ia6a4an5ecwnkpdpuq","txHash":"0x98998f7446c63178ea77d1b184cdfe781b9cbcff27100f03806f482b354f1be9","index":136,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"MemberStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038\\",\\"receiptCID\\":\\"bagkacgza7njxwiac6p4vxcmw5gnyxs32bum5jeq6k3j7xxyzaqm7gcrw6hwa\\",\\"log\\":{\\"cid\\":\\"bagmqcgzalcfjovtx7akikb4dhhu3i65pym47rdy3rys6d7trlfdzmr53us2a\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a088e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2a0501b05f326e247749a9ee05e173a4b32508afcf85ec6dbb26a6cbb2a4f2e8671a00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 2
-6 0x98998f7446c63178ea77d1b184cdfe781b9cbcff27100f03806f482b354f1be9 437 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0xDdb18b319BE3530560eECFF962032dFAD88212d4"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzatcmy65cgyyyxr2tx2gyyjtp6panzzph7e4ia6a4an5ecwnkpdpuq","txHash":"0x98998f7446c63178ea77d1b184cdfe781b9cbcff27100f03806f482b354f1be9","index":136,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038\\",\\"receiptCID\\":\\"bagkacgza7njxwiac6p4vxcmw5gnyxs32bum5jeq6k3j7xxyzaqm7gcrw6hwa\\",\\"log\\":{\\"cid\\":\\"bagmqcgzanj72wfbfvqby3dvz3jnh5nwstmvl3nlm6kxrkgfio7z643s2qesq\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 2
-7 0x98998f7446c63178ea77d1b184cdfe781b9cbcff27100f03806f482b354f1be9 438 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 MemberStatusUpdated {"entity":"0x0b73fffe472959ca14f2bfa56de755ad570d80daaf8eb935ac5e60578d9cdf6e","isMember":true} {"topics":["0x88e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2","0x0b73fffe472959ca14f2bfa56de755ad570d80daaf8eb935ac5e60578d9cdf6e"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzatcmy65cgyyyxr2tx2gyyjtp6panzzph7e4ia6a4an5ecwnkpdpuq","txHash":"0x98998f7446c63178ea77d1b184cdfe781b9cbcff27100f03806f482b354f1be9","index":136,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"MemberStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038\\",\\"receiptCID\\":\\"bagkacgza7njxwiac6p4vxcmw5gnyxs32bum5jeq6k3j7xxyzaqm7gcrw6hwa\\",\\"log\\":{\\"cid\\":\\"bagmqcgzaehy7vjkfidari3wc72kp3baac2w5zjfcmt4wvz6bs4mgkpjrlnta\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a088e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2a00b73fffe472959ca14f2bfa56de755ad570d80daaf8eb935ac5e60578d9cdf6ea00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 2
-8 0x98998f7446c63178ea77d1b184cdfe781b9cbcff27100f03806f482b354f1be9 439 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0xDdb18b319BE3530560eECFF962032dFAD88212d4"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzatcmy65cgyyyxr2tx2gyyjtp6panzzph7e4ia6a4an5ecwnkpdpuq","txHash":"0x98998f7446c63178ea77d1b184cdfe781b9cbcff27100f03806f482b354f1be9","index":136,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038\\",\\"receiptCID\\":\\"bagkacgza7njxwiac6p4vxcmw5gnyxs32bum5jeq6k3j7xxyzaqm7gcrw6hwa\\",\\"log\\":{\\"cid\\":\\"bagmqcgzanj72wfbfvqby3dvz3jnh5nwstmvl3nlm6kxrkgfio7z643s2qesq\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 2
-9 0x98998f7446c63178ea77d1b184cdfe781b9cbcff27100f03806f482b354f1be9 440 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 MemberStatusUpdated {"entity":"0x8276afdf1db4e6957dd6e50fb3e6ddb56594c9adcff5403706515b9eab719f27","isMember":true} {"topics":["0x88e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2","0x8276afdf1db4e6957dd6e50fb3e6ddb56594c9adcff5403706515b9eab719f27"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzatcmy65cgyyyxr2tx2gyyjtp6panzzph7e4ia6a4an5ecwnkpdpuq","txHash":"0x98998f7446c63178ea77d1b184cdfe781b9cbcff27100f03806f482b354f1be9","index":136,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"MemberStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038\\",\\"receiptCID\\":\\"bagkacgza7njxwiac6p4vxcmw5gnyxs32bum5jeq6k3j7xxyzaqm7gcrw6hwa\\",\\"log\\":{\\"cid\\":\\"bagmqcgzaqbsfupctztrjxngvfcntxi5c4pdee5sh46wmtlbs5sbbqbplcoiq\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a088e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2a08276afdf1db4e6957dd6e50fb3e6ddb56594c9adcff5403706515b9eab719f27a00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 2
-10 0x930191eb049b1ce18e58b2c0017a1c3213bb509bd5469acd3b2b6c1ffc8859ff 271 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0x50f01432A375DcDEa074957154e4F8d1aEB4177d"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x00000000000000000000000050f01432a375dcdea074957154e4f8d1aeb4177d"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzasmazd2yetmooddsywlaac6q4gij3wue32vdjvtj3fnwb77eilh7q","txHash":"0x930191eb049b1ce18e58b2c0017a1c3213bb509bd5469acd3b2b6c1ffc8859ff","index":296,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x6fa2fb909c9b386664c224778bf08d7d5c39bf2dd4f414a7fa1056a3c5a6e86e\\",\\"receiptCID\\":\\"bagkacgzaiwyyw2llnh3rwbyep42qkqyftchkkppb5qj5f4u6ltdz2cl5kcaa\\",\\"log\\":{\\"cid\\":\\"bagmqcgzadn3fcrvtf5wwsqprt4qjdxll76kn7teshumu3rmosxai55l3qysq\\",\\"ipldBlock\\":\\"0xf87f30b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a000000000000000000000000050f01432a375dcdea074957154e4f8d1aeb4177da0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 3
-11 0x930191eb049b1ce18e58b2c0017a1c3213bb509bd5469acd3b2b6c1ffc8859ff 272 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 MemberStatusUpdated {"entity":"0x5be61e7fb5d5175135aaa6b232f13d9b22a229113638cdc0bac78221ff9c9aa0","isMember":true} {"topics":["0x88e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2","0x5be61e7fb5d5175135aaa6b232f13d9b22a229113638cdc0bac78221ff9c9aa0"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzasmazd2yetmooddsywlaac6q4gij3wue32vdjvtj3fnwb77eilh7q","txHash":"0x930191eb049b1ce18e58b2c0017a1c3213bb509bd5469acd3b2b6c1ffc8859ff","index":296,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"MemberStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x6fa2fb909c9b386664c224778bf08d7d5c39bf2dd4f414a7fa1056a3c5a6e86e\\",\\"receiptCID\\":\\"bagkacgzaiwyyw2llnh3rwbyep42qkqyftchkkppb5qj5f4u6ltdz2cl5kcaa\\",\\"log\\":{\\"cid\\":\\"bagmqcgzaq5l7ow4vbidbo3p2djy5qy4mprqyir4dmol2uqeyvxc7fxfl4kvq\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a088e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2a05be61e7fb5d5175135aaa6b232f13d9b22a229113638cdc0bac78221ff9c9aa0a00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 3
-12 0x930191eb049b1ce18e58b2c0017a1c3213bb509bd5469acd3b2b6c1ffc8859ff 273 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0x50f01432A375DcDEa074957154e4F8d1aEB4177d"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x00000000000000000000000050f01432a375dcdea074957154e4f8d1aeb4177d"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzasmazd2yetmooddsywlaac6q4gij3wue32vdjvtj3fnwb77eilh7q","txHash":"0x930191eb049b1ce18e58b2c0017a1c3213bb509bd5469acd3b2b6c1ffc8859ff","index":296,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x6fa2fb909c9b386664c224778bf08d7d5c39bf2dd4f414a7fa1056a3c5a6e86e\\",\\"receiptCID\\":\\"bagkacgzaiwyyw2llnh3rwbyep42qkqyftchkkppb5qj5f4u6ltdz2cl5kcaa\\",\\"log\\":{\\"cid\\":\\"bagmqcgzaas5munc2du7d2ipgyxqsa7reeueczkcfyrh5zjjesllsxatj3mgq\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a000000000000000000000000050f01432a375dcdea074957154e4f8d1aeb4177da0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 3
-13 0x930191eb049b1ce18e58b2c0017a1c3213bb509bd5469acd3b2b6c1ffc8859ff 274 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 MemberStatusUpdated {"entity":"0x956e5681abbafa25458057b0abaa1a3cec4108d2289954836d0c7f5b37fd6580","isMember":true} {"topics":["0x88e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2","0x956e5681abbafa25458057b0abaa1a3cec4108d2289954836d0c7f5b37fd6580"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzasmazd2yetmooddsywlaac6q4gij3wue32vdjvtj3fnwb77eilh7q","txHash":"0x930191eb049b1ce18e58b2c0017a1c3213bb509bd5469acd3b2b6c1ffc8859ff","index":296,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"MemberStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x6fa2fb909c9b386664c224778bf08d7d5c39bf2dd4f414a7fa1056a3c5a6e86e\\",\\"receiptCID\\":\\"bagkacgzaiwyyw2llnh3rwbyep42qkqyftchkkppb5qj5f4u6ltdz2cl5kcaa\\",\\"log\\":{\\"cid\\":\\"bagmqcgzagp47k6p3tgrom3adpx6jvr45vne2edtejaenqggtxjjfqramcmea\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a088e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2a0956e5681abbafa25458057b0abaa1a3cec4108d2289954836d0c7f5b37fd6580a00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 3
-14 0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed 507 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0xBc89f39d47BF0f67CA1e0C7aBBE3236F454f748a"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x000000000000000000000000bc89f39d47bf0f67ca1e0c7abbe3236f454f748a"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzard4ovbngn6f46s7hqbcjmymigu2pclf3kttjywdwc62mfdi24dwq","txHash":"0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed","index":193,"src":"0x19c49117a8167296cAF5D23Ab48e355ec1c8bE8B","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"receiptCID\\":\\"bagkacgza756voltxaaftxraxkdjhuh6jh57zla6mqkunpiajivf477kkoleq\\",\\"log\\":{\\"cid\\":\\"bagmqcgzagf7jx3lguaponolmnsjyxm2mhpkaroghk26roi7okwglucjtjs4q\\",\\"ipldBlock\\":\\"0xf87f30b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a0000000000000000000000000bc89f39d47bf0f67ca1e0c7abbe3236f454f748aa0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 4
-15 0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed 508 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 MemberStatusUpdated {"entity":"0xdb00d9ee49d48ca5077597917bf50d84d2671b16a94c95fa4fa5be69bc50c03a","isMember":true} {"topics":["0x88e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2","0xdb00d9ee49d48ca5077597917bf50d84d2671b16a94c95fa4fa5be69bc50c03a"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzard4ovbngn6f46s7hqbcjmymigu2pclf3kttjywdwc62mfdi24dwq","txHash":"0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed","index":193,"src":"0x19c49117a8167296cAF5D23Ab48e355ec1c8bE8B","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"MemberStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"receiptCID\\":\\"bagkacgza756voltxaaftxraxkdjhuh6jh57zla6mqkunpiajivf477kkoleq\\",\\"log\\":{\\"cid\\":\\"bagmqcgzatttg7cjphkpc46klxy32jr4vfj6lxo7573nz3rob6dvnq7magsoa\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a088e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2a0db00d9ee49d48ca5077597917bf50d84d2671b16a94c95fa4fa5be69bc50c03aa00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 4
-16 0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed 509 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0xBc89f39d47BF0f67CA1e0C7aBBE3236F454f748a"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x000000000000000000000000bc89f39d47bf0f67ca1e0c7abbe3236f454f748a"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzard4ovbngn6f46s7hqbcjmymigu2pclf3kttjywdwc62mfdi24dwq","txHash":"0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed","index":193,"src":"0x19c49117a8167296cAF5D23Ab48e355ec1c8bE8B","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"receiptCID\\":\\"bagkacgza756voltxaaftxraxkdjhuh6jh57zla6mqkunpiajivf477kkoleq\\",\\"log\\":{\\"cid\\":\\"bagmqcgza2g5np2s2ffmppacclx3gwmrjeumoi5c44l6lt64ekctavu5f356a\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a0000000000000000000000000bc89f39d47bf0f67ca1e0c7abbe3236f454f748aa0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 4
-17 0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed 510 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 MemberStatusUpdated {"entity":"0x33dc7a4e6362711b3cbdc90edcb9a621ed5c2ba73eb4adbf3e90cc21764d550d","isMember":true} {"topics":["0x88e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2","0x33dc7a4e6362711b3cbdc90edcb9a621ed5c2ba73eb4adbf3e90cc21764d550d"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzard4ovbngn6f46s7hqbcjmymigu2pclf3kttjywdwc62mfdi24dwq","txHash":"0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed","index":193,"src":"0x19c49117a8167296cAF5D23Ab48e355ec1c8bE8B","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"MemberStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"receiptCID\\":\\"bagkacgza756voltxaaftxraxkdjhuh6jh57zla6mqkunpiajivf477kkoleq\\",\\"log\\":{\\"cid\\":\\"bagmqcgzamjreuppb5xkmjdelhahazmb54mzykjufxj4fvo42u26iqxuxpzdq\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a088e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2a033dc7a4e6362711b3cbdc90edcb9a621ed5c2ba73eb4adbf3e90cc21764d550da00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 4
-18 0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed 511 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0xBc89f39d47BF0f67CA1e0C7aBBE3236F454f748a"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x000000000000000000000000bc89f39d47bf0f67ca1e0c7abbe3236f454f748a"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzard4ovbngn6f46s7hqbcjmymigu2pclf3kttjywdwc62mfdi24dwq","txHash":"0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed","index":193,"src":"0x19c49117a8167296cAF5D23Ab48e355ec1c8bE8B","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"receiptCID\\":\\"bagkacgza756voltxaaftxraxkdjhuh6jh57zla6mqkunpiajivf477kkoleq\\",\\"log\\":{\\"cid\\":\\"bagmqcgza2g5np2s2ffmppacclx3gwmrjeumoi5c44l6lt64ekctavu5f356a\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a0000000000000000000000000bc89f39d47bf0f67ca1e0c7abbe3236f454f748aa0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 4
-19 0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed 512 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 MemberStatusUpdated {"entity":"0xdef5c249e7975deeacae0568ccd7ad10f4b482c4ef3476bf448ff9bb6167731f","isMember":true} {"topics":["0x88e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2","0xdef5c249e7975deeacae0568ccd7ad10f4b482c4ef3476bf448ff9bb6167731f"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzard4ovbngn6f46s7hqbcjmymigu2pclf3kttjywdwc62mfdi24dwq","txHash":"0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed","index":193,"src":"0x19c49117a8167296cAF5D23Ab48e355ec1c8bE8B","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"MemberStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"receiptCID\\":\\"bagkacgza756voltxaaftxraxkdjhuh6jh57zla6mqkunpiajivf477kkoleq\\",\\"log\\":{\\"cid\\":\\"bagmqcgzaeokcjndceushmyfhdkag7fwkg25knbwoxjxqlqhjlrkgmhjj27hq\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a088e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2a0def5c249e7975deeacae0568ccd7ad10f4b482c4ef3476bf448ff9bb6167731fa00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 4
-20 0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed 513 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0xBc89f39d47BF0f67CA1e0C7aBBE3236F454f748a"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x000000000000000000000000bc89f39d47bf0f67ca1e0c7abbe3236f454f748a"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzard4ovbngn6f46s7hqbcjmymigu2pclf3kttjywdwc62mfdi24dwq","txHash":"0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed","index":193,"src":"0x19c49117a8167296cAF5D23Ab48e355ec1c8bE8B","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"receiptCID\\":\\"bagkacgza756voltxaaftxraxkdjhuh6jh57zla6mqkunpiajivf477kkoleq\\",\\"log\\":{\\"cid\\":\\"bagmqcgza2g5np2s2ffmppacclx3gwmrjeumoi5c44l6lt64ekctavu5f356a\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a0000000000000000000000000bc89f39d47bf0f67ca1e0c7abbe3236f454f748aa0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 4
-21 0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed 514 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 MemberStatusUpdated {"entity":"0x165892f97103f95276884abea5e604985437687a8e5b35ac4428098f69c66a9f","isMember":true} {"topics":["0x88e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2","0x165892f97103f95276884abea5e604985437687a8e5b35ac4428098f69c66a9f"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzard4ovbngn6f46s7hqbcjmymigu2pclf3kttjywdwc62mfdi24dwq","txHash":"0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed","index":193,"src":"0x19c49117a8167296cAF5D23Ab48e355ec1c8bE8B","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"MemberStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"receiptCID\\":\\"bagkacgza756voltxaaftxraxkdjhuh6jh57zla6mqkunpiajivf477kkoleq\\",\\"log\\":{\\"cid\\":\\"bagmqcgzanwfms4swgcarbwfosr7uhmyxsefofusyj6m2oyoxy54zldewkeda\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a088e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2a0165892f97103f95276884abea5e604985437687a8e5b35ac4428098f69c66a9fa00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 4
-22 0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed 515 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0xBc89f39d47BF0f67CA1e0C7aBBE3236F454f748a"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x000000000000000000000000bc89f39d47bf0f67ca1e0c7abbe3236f454f748a"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzard4ovbngn6f46s7hqbcjmymigu2pclf3kttjywdwc62mfdi24dwq","txHash":"0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed","index":193,"src":"0x19c49117a8167296cAF5D23Ab48e355ec1c8bE8B","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"receiptCID\\":\\"bagkacgza756voltxaaftxraxkdjhuh6jh57zla6mqkunpiajivf477kkoleq\\",\\"log\\":{\\"cid\\":\\"bagmqcgza2g5np2s2ffmppacclx3gwmrjeumoi5c44l6lt64ekctavu5f356a\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a0000000000000000000000000bc89f39d47bf0f67ca1e0c7abbe3236f454f748aa0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 4
-23 0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed 516 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 MemberStatusUpdated {"entity":"0x4e47d3592c7c70485bf59f3aae389fbc82455da11000f53ac0665c5e343c8e14","isMember":true} {"topics":["0x88e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2","0x4e47d3592c7c70485bf59f3aae389fbc82455da11000f53ac0665c5e343c8e14"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzard4ovbngn6f46s7hqbcjmymigu2pclf3kttjywdwc62mfdi24dwq","txHash":"0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed","index":193,"src":"0x19c49117a8167296cAF5D23Ab48e355ec1c8bE8B","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"MemberStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"receiptCID\\":\\"bagkacgza756voltxaaftxraxkdjhuh6jh57zla6mqkunpiajivf477kkoleq\\",\\"log\\":{\\"cid\\":\\"bagmqcgza5yzqveeeqvq4wabjxyulanz6ynqe2vhjhwplkff4xjlwkjve3cta\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a088e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2a04e47d3592c7c70485bf59f3aae389fbc82455da11000f53ac0665c5e343c8e14a00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 4
-24 0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed 517 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0xBc89f39d47BF0f67CA1e0C7aBBE3236F454f748a"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x000000000000000000000000bc89f39d47bf0f67ca1e0c7abbe3236f454f748a"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzard4ovbngn6f46s7hqbcjmymigu2pclf3kttjywdwc62mfdi24dwq","txHash":"0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed","index":193,"src":"0x19c49117a8167296cAF5D23Ab48e355ec1c8bE8B","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"receiptCID\\":\\"bagkacgza756voltxaaftxraxkdjhuh6jh57zla6mqkunpiajivf477kkoleq\\",\\"log\\":{\\"cid\\":\\"bagmqcgza2g5np2s2ffmppacclx3gwmrjeumoi5c44l6lt64ekctavu5f356a\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a0000000000000000000000000bc89f39d47bf0f67ca1e0c7abbe3236f454f748aa0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 4
-25 0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed 518 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 MemberStatusUpdated {"entity":"0x16a1ef186d11b33d747c8c44fc8bf3445db567cd5ab29d9e2c1c81781a51647a","isMember":true} {"topics":["0x88e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2","0x16a1ef186d11b33d747c8c44fc8bf3445db567cd5ab29d9e2c1c81781a51647a"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzard4ovbngn6f46s7hqbcjmymigu2pclf3kttjywdwc62mfdi24dwq","txHash":"0x88f8ea85a66f8bcf4be780449661883534f12cbb54e69c587617b4c28d1ae0ed","index":193,"src":"0x19c49117a8167296cAF5D23Ab48e355ec1c8bE8B","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"MemberStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"receiptCID\\":\\"bagkacgza756voltxaaftxraxkdjhuh6jh57zla6mqkunpiajivf477kkoleq\\",\\"log\\":{\\"cid\\":\\"bagmqcgzagwxahowqxuwrld5k2yr5vzhkoelh7hu46dpvctllikaodxbn5yyq\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a088e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2a016a1ef186d11b33d747c8c44fc8bf3445db567cd5ab29d9e2c1c81781a51647aa00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 4
-26 0x6e2401fdf1301a0700ab604be31485a5a2e76b1a781ec3a4eff1e8100db80719 118 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0x8C38B6212D6A78EB7a2DA7E204fBfe003903CF47"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x0000000000000000000000008c38b6212d6a78eb7a2da7e204fbfe003903cf47"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzanysad7prganaoaflmbf6gfefuwroo2y2papmhjhp6hubadnya4mq","txHash":"0x6e2401fdf1301a0700ab604be31485a5a2e76b1a781ec3a4eff1e8100db80719","index":56,"src":"0xE8D848debB3A3e12AA815b15900c8E020B863F31","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0xafb470605fd86995175c2bb07ed62d9f78d1debff33ce2fc6f8d5f07a9ebeca2\\",\\"receiptCID\\":\\"bagkacgzaklu3ddgwwsmemfw5b2wgfs6c62euf233o3tslufku4u2v4bdt7za\\",\\"log\\":{\\"cid\\":\\"bagmqcgzaja54iaazd37cfk6pkqnkidfyguloff4er2e57oavnessaunweyma\\",\\"ipldBlock\\":\\"0xf87f30b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a00000000000000000000000008c38b6212d6a78eb7a2da7e204fbfe003903cf47a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 5
-27 0x6e2401fdf1301a0700ab604be31485a5a2e76b1a781ec3a4eff1e8100db80719 119 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 MemberStatusUpdated {"entity":"0x1c27f716f8d8b62fd373e4f08eb48277c22fbb3b3d146ba67313ab3b6d046fd0","isMember":true} {"topics":["0x88e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2","0x1c27f716f8d8b62fd373e4f08eb48277c22fbb3b3d146ba67313ab3b6d046fd0"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzanysad7prganaoaflmbf6gfefuwroo2y2papmhjhp6hubadnya4mq","txHash":"0x6e2401fdf1301a0700ab604be31485a5a2e76b1a781ec3a4eff1e8100db80719","index":56,"src":"0xE8D848debB3A3e12AA815b15900c8E020B863F31","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"MemberStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0xafb470605fd86995175c2bb07ed62d9f78d1debff33ce2fc6f8d5f07a9ebeca2\\",\\"receiptCID\\":\\"bagkacgzaklu3ddgwwsmemfw5b2wgfs6c62euf233o3tslufku4u2v4bdt7za\\",\\"log\\":{\\"cid\\":\\"bagmqcgzagx4cimqpipdrqbxwlw44tfvzedutvmqk4euok6d4n3ge77r2xloq\\",\\"ipldBlock\\":\\"0xf87f31b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a088e1b1a43f3edcb9afe941dfea296f5bc32fab715b5fc9aa101ec26d87d2e8a2a01c27f716f8d8b62fd373e4f08eb48277c22fbb3b3d146ba67313ab3b6d046fd0a00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 5
-28 0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3 225 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0xDdb18b319BE3530560eECFF962032dFAD88212d4"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzam2xbu7uh235yw6i73gfhwxyac2jge37oze7xmb7jix6yiugi7hrq","txHash":"0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3","index":438,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"receiptCID\\":\\"bagkacgzaickyui2bivfkwglvhlgs3dzbgzllvitvssccwsyg6evimm4hfaga\\",\\"log\\":{\\"cid\\":\\"bagmqcgzaz22koutltuxcphbuc72dcdt6xuqr2e3mk4w75xksg2zzqaynbmoa\\",\\"ipldBlock\\":\\"0xf87f30b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 6
-29 0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3 226 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 PhisherStatusUpdated {"entity":"0xd03b69864961ea513339c2896c365ffde0e6620a1ab832d93c6656f8ce6f988e","isPhisher":true} {"topics":["0x9d3712f4978fc20b17a1dfbcd563f9aded75d05b6019427a9eca23245220138b","0xd03b69864961ea513339c2896c365ffde0e6620a1ab832d93c6656f8ce6f988e"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzam2xbu7uh235yw6i73gfhwxyac2jge37oze7xmb7jix6yiugi7hrq","txHash":"0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3","index":438,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"PhisherStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"receiptCID\\":\\"bagkacgzaickyui2bivfkwglvhlgs3dzbgzllvitvssccwsyg6evimm4hfaga\\",\\"log\\":{\\"cid\\":\\"bagmqcgza2uylmeipltns5rcegmzev2dtcpm3yf7exr7azelvmmc45p7en3na\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a09d3712f4978fc20b17a1dfbcd563f9aded75d05b6019427a9eca23245220138ba0d03b69864961ea513339c2896c365ffde0e6620a1ab832d93c6656f8ce6f988ea00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 6
-30 0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3 227 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0xDdb18b319BE3530560eECFF962032dFAD88212d4"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzam2xbu7uh235yw6i73gfhwxyac2jge37oze7xmb7jix6yiugi7hrq","txHash":"0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3","index":438,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"receiptCID\\":\\"bagkacgzaickyui2bivfkwglvhlgs3dzbgzllvitvssccwsyg6evimm4hfaga\\",\\"log\\":{\\"cid\\":\\"bagmqcgzanj72wfbfvqby3dvz3jnh5nwstmvl3nlm6kxrkgfio7z643s2qesq\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 6
-31 0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3 228 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 PhisherStatusUpdated {"entity":"0xb3beb6867a4bef1f11b65e036b831cd3b81e74898005c13110e0539fc74e8183","isPhisher":true} {"topics":["0x9d3712f4978fc20b17a1dfbcd563f9aded75d05b6019427a9eca23245220138b","0xb3beb6867a4bef1f11b65e036b831cd3b81e74898005c13110e0539fc74e8183"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzam2xbu7uh235yw6i73gfhwxyac2jge37oze7xmb7jix6yiugi7hrq","txHash":"0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3","index":438,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"PhisherStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"receiptCID\\":\\"bagkacgzaickyui2bivfkwglvhlgs3dzbgzllvitvssccwsyg6evimm4hfaga\\",\\"log\\":{\\"cid\\":\\"bagmqcgza23km44tuxt7uhtvhagfn4imaoctdxsvobpdgqtjpunsd7gk3owwq\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a09d3712f4978fc20b17a1dfbcd563f9aded75d05b6019427a9eca23245220138ba0b3beb6867a4bef1f11b65e036b831cd3b81e74898005c13110e0539fc74e8183a00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 6
-32 0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3 229 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0xDdb18b319BE3530560eECFF962032dFAD88212d4"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzam2xbu7uh235yw6i73gfhwxyac2jge37oze7xmb7jix6yiugi7hrq","txHash":"0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3","index":438,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"receiptCID\\":\\"bagkacgzaickyui2bivfkwglvhlgs3dzbgzllvitvssccwsyg6evimm4hfaga\\",\\"log\\":{\\"cid\\":\\"bagmqcgzanj72wfbfvqby3dvz3jnh5nwstmvl3nlm6kxrkgfio7z643s2qesq\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 6
-33 0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3 230 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 PhisherStatusUpdated {"entity":"0xed6ad0a79ec0ad3e559cf0f958d9e28c6e6bf6be025a8249a975c9a8e2180acf","isPhisher":true} {"topics":["0x9d3712f4978fc20b17a1dfbcd563f9aded75d05b6019427a9eca23245220138b","0xed6ad0a79ec0ad3e559cf0f958d9e28c6e6bf6be025a8249a975c9a8e2180acf"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzam2xbu7uh235yw6i73gfhwxyac2jge37oze7xmb7jix6yiugi7hrq","txHash":"0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3","index":438,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"PhisherStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"receiptCID\\":\\"bagkacgzaickyui2bivfkwglvhlgs3dzbgzllvitvssccwsyg6evimm4hfaga\\",\\"log\\":{\\"cid\\":\\"bagmqcgzatns5jnxezocu52ibouvcladwphpkervyibz35llxy4kxra5kqrxq\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a09d3712f4978fc20b17a1dfbcd563f9aded75d05b6019427a9eca23245220138ba0ed6ad0a79ec0ad3e559cf0f958d9e28c6e6bf6be025a8249a975c9a8e2180acfa00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 6
-34 0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3 231 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0xDdb18b319BE3530560eECFF962032dFAD88212d4"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzam2xbu7uh235yw6i73gfhwxyac2jge37oze7xmb7jix6yiugi7hrq","txHash":"0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3","index":438,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"receiptCID\\":\\"bagkacgzaickyui2bivfkwglvhlgs3dzbgzllvitvssccwsyg6evimm4hfaga\\",\\"log\\":{\\"cid\\":\\"bagmqcgzanj72wfbfvqby3dvz3jnh5nwstmvl3nlm6kxrkgfio7z643s2qesq\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 6
-35 0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3 232 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 PhisherStatusUpdated {"entity":"0x8f9e6c0c3630ec9bccfb22c903753257d2352a9800255daafcf1665ed3d4be45","isPhisher":true} {"topics":["0x9d3712f4978fc20b17a1dfbcd563f9aded75d05b6019427a9eca23245220138b","0x8f9e6c0c3630ec9bccfb22c903753257d2352a9800255daafcf1665ed3d4be45"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzam2xbu7uh235yw6i73gfhwxyac2jge37oze7xmb7jix6yiugi7hrq","txHash":"0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3","index":438,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"PhisherStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"receiptCID\\":\\"bagkacgzaickyui2bivfkwglvhlgs3dzbgzllvitvssccwsyg6evimm4hfaga\\",\\"log\\":{\\"cid\\":\\"bagmqcgzaeb4dn6y2qmnizhopkyr7poewd66gm2brx76cskal6kv5pn55hukq\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a09d3712f4978fc20b17a1dfbcd563f9aded75d05b6019427a9eca23245220138ba08f9e6c0c3630ec9bccfb22c903753257d2352a9800255daafcf1665ed3d4be45a00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 6
-36 0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3 233 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0xDdb18b319BE3530560eECFF962032dFAD88212d4"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzam2xbu7uh235yw6i73gfhwxyac2jge37oze7xmb7jix6yiugi7hrq","txHash":"0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3","index":438,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"receiptCID\\":\\"bagkacgzaickyui2bivfkwglvhlgs3dzbgzllvitvssccwsyg6evimm4hfaga\\",\\"log\\":{\\"cid\\":\\"bagmqcgzanj72wfbfvqby3dvz3jnh5nwstmvl3nlm6kxrkgfio7z643s2qesq\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 6
-37 0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3 234 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 PhisherStatusUpdated {"entity":"0x895499123a28e797f284b94560fcc346a421533cb3ed9d4373293d533849e523","isPhisher":true} {"topics":["0x9d3712f4978fc20b17a1dfbcd563f9aded75d05b6019427a9eca23245220138b","0x895499123a28e797f284b94560fcc346a421533cb3ed9d4373293d533849e523"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzam2xbu7uh235yw6i73gfhwxyac2jge37oze7xmb7jix6yiugi7hrq","txHash":"0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3","index":438,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"PhisherStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"receiptCID\\":\\"bagkacgzaickyui2bivfkwglvhlgs3dzbgzllvitvssccwsyg6evimm4hfaga\\",\\"log\\":{\\"cid\\":\\"bagmqcgzapdolzcaiqir2ankq2of4kdts5spg7ov5ofkgqora47u6kmpijwza\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a09d3712f4978fc20b17a1dfbcd563f9aded75d05b6019427a9eca23245220138ba0895499123a28e797f284b94560fcc346a421533cb3ed9d4373293d533849e523a00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 6
-38 0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3 235 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 DelegationTriggered {"principal":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","agent":"0xDdb18b319BE3530560eECFF962032dFAD88212d4"} {"topics":["0x185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960","0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4"],"data":"0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4","tx":{"cid":"bagjqcgzam2xbu7uh235yw6i73gfhwxyac2jge37oze7xmb7jix6yiugi7hrq","txHash":"0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3","index":438,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"DelegationTriggered(address,address)"} {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"receiptCID\\":\\"bagkacgzaickyui2bivfkwglvhlgs3dzbgzllvitvssccwsyg6evimm4hfaga\\",\\"log\\":{\\"cid\\":\\"bagmqcgzanj72wfbfvqby3dvz3jnh5nwstmvl3nlm6kxrkgfio7z643s2qesq\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a0185d11175440fcb6458fbc1889b02953452539ed80ad1da781a5449500f6d960a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4\\"}}"} 6
-39 0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3 236 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 PhisherStatusUpdated {"entity":"0x6d99b9b8f38c764f028cc564a69e4aa3c0d94fd4df0a9b0aab23cec3cfa03426","isPhisher":true} {"topics":["0x9d3712f4978fc20b17a1dfbcd563f9aded75d05b6019427a9eca23245220138b","0x6d99b9b8f38c764f028cc564a69e4aa3c0d94fd4df0a9b0aab23cec3cfa03426"],"data":"0x0000000000000000000000000000000000000000000000000000000000000001","tx":{"cid":"bagjqcgzam2xbu7uh235yw6i73gfhwxyac2jge37oze7xmb7jix6yiugi7hrq","txHash":"0x66ae1a7e87d6fb8b791fd98a7b5f001692626feec93f7607e945fd8450c8f9e3","index":438,"src":"0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825","dst":"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8","__typename":"EthTransactionCid"},"eventSignature":"PhisherStatusUpdated(string,bool)"} {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"receiptCID\\":\\"bagkacgzaickyui2bivfkwglvhlgs3dzbgzllvitvssccwsyg6evimm4hfaga\\",\\"log\\":{\\"cid\\":\\"bagmqcgzak4f2sns3dh6lmajwdimphm2h6rj4lqobhu2hrjndtzrkabhywuha\\",\\"ipldBlock\\":\\"0xf87f20b87cf87a94b06e6db9288324738f04fcaac910f5a60102c1f8f842a09d3712f4978fc20b17a1dfbcd563f9aded75d05b6019427a9eca23245220138ba06d99b9b8f38c764f028cc564a69e4aa3c0d94fd4df0a9b0aab23cec3cfa03426a00000000000000000000000000000000000000000000000000000000000000001\\"}}"} 6
-\.
-
-
---
--- Data for Name: ipld_block; Type: TABLE DATA; Schema: public; Owner: vdbm
---
-
-COPY public.ipld_block (id, contract_address, cid, kind, data, block_id) FROM stdin;
-1 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 bafyreigxcduvu2npfat2zunf2su63vmksekmqw6hlq7ijz7kfwvsbjolwe init \\xa2646d657461a4626964782a307842303645364442393238383332343733386630346643414163393130663541363031303243314638646b696e6464696e697466706172656e74a1612ff668657468426c6f636ba263636964a1612f783d626167696163677a61686b366171626270373568667432787674716e6a3432357161786a377a6534667370796b63733734356379786733346262336261636e756d1a00e2e4d1657374617465a0 1
-2 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 bafyreihshcncfaozkbpybok4scslmi4ogkdsmoo5guctkl3ov5ij4e7ena diff_staged \\xa2646d657461a4626964782a307842303645364442393238383332343733386630346643414163393130663541363031303243314638646b696e646b646966665f73746167656466706172656e74a1612f783b6261667972656967786364757675326e70666174327a756e663273753633766d6b73656b6d717736686c7137696a7a376b66777673626a6f6c776568657468426c6f636ba263636964a1612f783d626167696163677a61666466726e7a32617a766f783332646a7833726a6b377475696a347135686c786a7a78686461636b6d366a747937746371613461636e756d1a00e2fa61657374617465a16869734d656d626572a46c5457543a6b756d617669735f64747275656c5457543a6d6574616d61736b64747275656c5457543a74617976616e6f5f64747275656d5457543a64616e66696e6c61796474727565 2
-3 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 bafyreidnohfh3z2rgge2z6amrdn33ce66gdusrcwar2kfoig5ijozqo6he diff_staged \\xa2646d657461a4626964782a307842303645364442393238383332343733386630346643414163393130663541363031303243314638646b696e646b646966665f73746167656466706172656e74a1612f783b6261667972656967786364757675326e70666174327a756e663273753633766d6b73656b6d717736686c7137696a7a376b66777673626a6f6c776568657468426c6f636ba263636964a1612f783d626167696163677a616e36727078656534746d34676d7a6763657233797834656e70766f6474707a6e327432626a6a373263626c6b68726e6735627861636e756d1a00e2fef5657374617465a16869734d656d626572a26c5457543a72656b6d61726b736474727565715457543a6f6d6e61746f73686e6977616c6474727565 3
-4 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 bafyreidhsglp25dozbewxekeb5hueh5q4tu5kupwbn6q7tejtpmnk66qsu diff_staged \\xa2646d657461a4626964782a307842303645364442393238383332343733386630346643414163393130663541363031303243314638646b696e646b646966665f73746167656466706172656e74a1612f783b6261667972656967786364757675326e70666174327a756e663273753633766d6b73656b6d717736686c7137696a7a376b66777673626a6f6c776568657468426c6f636ba263636964a1612f783d626167696163677a616272636d6b6c736435633365677132686c72797067376f706167747675797371616635723271376e75653273746f7a6978626161636e756d1a00e32009657374617465a16869734d656d626572a66d5457543a61666475646c65793064747275656d5457543a666f616d737061636564747275656d5457543a66726f74686369747964747275656f5457543a76756c63616e697a65696f6474727565715457543a6d696b6567757368616e736b796474727565725457543a6c61636f6e69636e6574776f726b6474727565 4
-5 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 bafyreifocrnaxaj4qod3atzj4ipq3ocjztlydl3gcgmxiilbi4dbd2o2be diff_staged \\xa2646d657461a4626964782a307842303645364442393238383332343733386630346643414163393130663541363031303243314638646b696e646b646966665f73746167656466706172656e74a1612f783b6261667972656967786364757675326e70666174327a756e663273753633766d6b73656b6d717736686c7137696a7a376b66777673626a6f6c776568657468426c6f636ba263636964a1612f783d626167696163677a6176363268617963373362757a6b663234666f79683576726e7435346e64787637366d366f6637647072767071706b706c35737261636e756d1a00e3237b657374617465a16869734d656d626572a1735457543a64656e6e69736f6e6265727472616d6474727565 5
-6 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 bafyreicls2qpsocxj6yqwb2ujvrchi7zxeynh5qpevfy6o4un4qapwuwdy diff_staged \\xa2646d657461a4626964782a307842303645364442393238383332343733386630346643414163393130663541363031303243314638646b696e646b646966665f73746167656466706172656e74a1612f783b6261667972656967786364757675326e70666174327a756e663273753633766d6b73656b6d717736686c7137696a7a376b66777673626a6f6c776568657468426c6f636ba263636964a1612f783d626167696163677a616434707a33783275677870706b6475776d7672326e6378346761767232713572356c696d63777233676f6c326337636666323471636e756d1a00e87492657374617465a169697350686973686572a66e5457543a6a67686f7374323031306474727565715457543a6a6164656e37323434303030316474727565735457543a6261647361736b39323539333438396474727565735457543a6361737369647930363131343136356474727565735457543a65737472656c6c33313136333633316474727565735457543a6b696e6762656e37313335333833376474727565 6
-\.
-
-
---
--- Data for Name: ipld_status; Type: TABLE DATA; Schema: public; Owner: vdbm
---
-
-COPY public.ipld_status (id, latest_hooks_block_number, latest_checkpoint_block_number, latest_ipfs_block_number) FROM stdin;
+1 0x38b33ffb7fc3e0a540ff837cbb8eebd34ad039375d6aa71a6732ae350a2a6e04 130 0xD07Ed0eB708Cb7A660D22f2Ddf7b8C19c7bf1F69 OwnershipTransferred {"previousOwner":"0x0000000000000000000000000000000000000000","newOwner":"0xDdb18b319BE3530560eECFF962032dFAD88212d4"} {"topics":["0x8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e0","0x0000000000000000000000000000000000000000000000000000000000000000","0x000000000000000000000000ddb18b319be3530560eecff962032dfad88212d4"],"data":"0x","tx":{"cid":"bagjqcgzahczt7637ypqkkqh7qn6lxdxl2nfnaojxlvvkogthgkxdkcrknyca","txHash":"0x38b33ffb7fc3e0a540ff837cbb8eebd34ad039375d6aa71a6732ae350a2a6e04","index":19,"src":"0xDdb18b319BE3530560eECFF962032dFAD88212d4","dst":"","__typename":"EthTransactionCid"},"eventSignature":"OwnershipTransferred(address,address)"} {"data":"{\\"blockHash\\":\\"0xa5c68e0b9fe8e2e37eeae4c89297c62a688ca3f6b611dcbd1fce2cba67d049a6\\",\\"receiptCID\\":\\"bagkacgza2kim2ps4wbitho6rypgto2or3wmlv23exss5etuqrdhut5nrkjvq\\",\\"log\\":{\\"cid\\":\\"bagmqcgzahsekigljws2wv4b7nfa7noghcwf4goa7tbhglqgftu5ycnzxcbbq\\",\\"ipldBlock\\":\\"0xf882822080b87df87b94d07ed0eb708cb7a660d22f2ddf7b8c19c7bf1f69f863a08be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e0a00000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000ddb18b319be3530560eecff962032dfad88212d480\\"}}"} 1
\.
@@ -701,19 +598,6 @@ COPY public.ipld_status (id, latest_hooks_block_number, latest_checkpoint_block_
--
COPY public.is_member (id, block_hash, block_number, contract_address, key0, value, proof) FROM stdin;
-1 0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038 14875233 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:danfinlay t {"data":"{\\"blockHash\\":\\"0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgzajz2idgp3mppl3xecw2jiyrdtpqxdsks3l2vayyrhylj2ddrsvf2q\\",\\"ipldBlock\\":\\"0xe2a0203d41e15b233c6d8a6221399699ffc64b2cca7ada26b947d7642b930362ca2001\\"}}}"}
-2 0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038 14875233 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:metamask t {"data":"{\\"blockHash\\":\\"0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgzayklkqlq7oyerf7d46p2bnccsqgnj24z5ey5iwnn3nesl5b6t2bba\\",\\"ipldBlock\\":\\"0xe2a0208bb17e9a3a883c386024f8e1a6976a71526c4598fd5577bde1e8e78dc5cceb01\\"}}}"}
-3 0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038 14875233 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:kumavis_ t {"data":"{\\"blockHash\\":\\"0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgzac4qmw47e5joqwqb62grydulsl62z6auzi3bpimezqowvedyqfz4a\\",\\"ipldBlock\\":\\"0xe2a020c4db4f66db1cb7f05bfa6518607749beab650a765c80492a458fbef069d21d01\\"}}}"}
-4 0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038 14875233 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:tayvano_ t {"data":"{\\"blockHash\\":\\"0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgzau2pcjzqad7bvet5tqprkvo75uyfiuiewle3rzgka65xb4msinxxq\\",\\"ipldBlock\\":\\"0xe2a0325a534478c2e78913d54d916517598739b2920691f3cdaa47dd025f4718492401\\"}}}"}
-5 0x6fa2fb909c9b386664c224778bf08d7d5c39bf2dd4f414a7fa1056a3c5a6e86e 14876405 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:rekmarks t {"data":"{\\"blockHash\\":\\"0x6fa2fb909c9b386664c224778bf08d7d5c39bf2dd4f414a7fa1056a3c5a6e86e\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgza6bl5chphg5sp2hbmakf3m3hf5i2aqpwniit7fquldl4cyz6rcjyq\\",\\"ipldBlock\\":\\"0xe2a0370e3dd0b59d081149bd02578f68bc8b82b38d83a65eab9c0039330f2f44b1be01\\"}}}"}
-6 0x6fa2fb909c9b386664c224778bf08d7d5c39bf2dd4f414a7fa1056a3c5a6e86e 14876405 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:omnatoshniwal t {"data":"{\\"blockHash\\":\\"0x6fa2fb909c9b386664c224778bf08d7d5c39bf2dd4f414a7fa1056a3c5a6e86e\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgzaevw2g7ldqq7u2cifx625hj2mtgpthw2gxo55hi3kfhmirlco27kq\\",\\"ipldBlock\\":\\"0xe2a020099e064c465e189f524b4ea5e1e1f880cc2404d54a5c3820cae1426406e3eb01\\"}}}"}
-7 0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840 14884873 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:afdudley0 t {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgza4yb2o77os2exgj7ao2gmcycrktszfccus2pgiqayoyesbyv36yuq\\",\\"ipldBlock\\":\\"0xe2a0206f8288d5713c0319b22d7d7871ea9f79da0e2a69c4810045f7f9d8b513c97701\\"}}}"}
-8 0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840 14884873 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:vulcanizeio t {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgza43rlyrrrvwbxuo4jwrk2aibx2yau2jwubvtkmufdu62ndxti5pla\\",\\"ipldBlock\\":\\"0xe2a020a206b39b5245e291b83d5b8bcad50fdca5196cedf7e717b87ab79b8d983f0701\\"}}}"}
-9 0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840 14884873 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:laconicnetwork t {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgzaqlzj74qpi46z4lepfew43klj5jmyoiuzlhma6o6jozkjybc2lsvq\\",\\"ipldBlock\\":\\"0xe2a020ecd3a96a9329551758da7fdf41b5816885e29b184c3939c13c6ea20206fd2901\\"}}}"}
-10 0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840 14884873 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:mikegushansky t {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgzalgavzfjocdkshzxwlpqmf3azofoz67rvulr5xxuqsvmmuvadwzdq\\",\\"ipldBlock\\":\\"0xe2a0202951bc50ed50810c883cc3f755dabb64394375acece9ea4be99e5a584fe6c901\\"}}}"}
-11 0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840 14884873 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:foamspace t {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgzasy7at57g5wewqtzjlkh6vudbs7wbwx5qw7637fwi5b3nunw54usq\\",\\"ipldBlock\\":\\"0xe2a02029d04f9e7b98346aa9c447decb17659db9af23890b9c70f579a029cdcf593c01\\"}}}"}
-12 0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840 14884873 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:frothcity t {"data":"{\\"blockHash\\":\\"0x0c44c52e43e8b64343475c70f37dcf01a75a6250017b1d43eda13529bb28b840\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgzawvqcds52in2gemhyszayvrl5zfc66up6hcuchxmi3ce4kzi5pweq\\",\\"ipldBlock\\":\\"0xe2a02034ac30337c5c70d2540bb4434e35ce4532a4eab91c852dca23deaacb0e275201\\"}}}"}
-13 0xafb470605fd86995175c2bb07ed62d9f78d1debff33ce2fc6f8d5f07a9ebeca2 14885755 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:dennisonbertram t {"data":"{\\"blockHash\\":\\"0xafb470605fd86995175c2bb07ed62d9f78d1debff33ce2fc6f8d5f07a9ebeca2\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgzaeogthongsys3jydz4jw2sj5t7mqeqbor2qnaium4c5h5v74fqbta\\",\\"ipldBlock\\":\\"0xe19f3fea74c522a79f7db606c382429e0cb363617f45d6fd59cc02a2857144f18801\\"}}}"}
\.
@@ -722,12 +606,6 @@ COPY public.is_member (id, block_hash, block_number, contract_address, key0, val
--
COPY public.is_phisher (id, block_hash, block_number, contract_address, key0, value, proof) FROM stdin;
-1 0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9 15234194 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:cassidy06114165 t {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgzadyh6cl32cgz3rnd65247arv3fnjw7p6uqfcfysof4dksd2illf6q\\",\\"ipldBlock\\":\\"0xe2a0203c2016b922ff7b5efb562ade4ce1790eac49e191d0d6230b261475b1c2eb9b01\\"}}}"}
-2 0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9 15234194 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:badsask92593489 t {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgzaqrrqdxcwdv654m3vpbiafzvjrhrvs7wv5wncbncb665dprx4cnzq\\",\\"ipldBlock\\":\\"0xe2a0204243b96ea0ada3c3ca9668be1e1ab841ee01999a18d1ebebae8ba2d24aa53101\\"}}}"}
-3 0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9 15234194 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:estrell31163631 t {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgzaq25w3xcn7ahsaclw7lvbhv6wmuft6fwll6gs26pfure52vak2oea\\",\\"ipldBlock\\":\\"0xe2a020e7f0d045adaf03aaca32f26b20a70af72062abbdca72eca237efe7fe297a6a01\\"}}}"}
-4 0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9 15234194 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:kingben71353837 t {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgzal4unm5r3ut4fsolqkibsowhada5aixdmjfaubaxamlrxes2t3eza\\",\\"ipldBlock\\":\\"0xe2a0347aeddef1702483d61eca78b85ff35caff4917a18acef04923858e206c58da401\\"}}}"}
-5 0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9 15234194 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:jaden72440001 t {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgzadfup5fbucciy32alz4upntikcijqiqvwcjszkmuuugna26raioca\\",\\"ipldBlock\\":\\"0xe2a03c76ec48ccf04032d7c8463b37c68e68de9a2602967327c3c70f1a15a11f117b01\\"}}}"}
-6 0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9 15234194 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 TWT:jghost2010 t {"data":"{\\"blockHash\\":\\"0x1f1f9ddf5435def50e966563a68afc302b1d43b1ead0c15a3b3397a17c452eb9\\",\\"account\\":{\\"address\\":\\"0xB06E6DB9288324738f04fCAAc910f5A60102C1F8\\",\\"storage\\":{\\"cid\\":\\"bagmacgzab5h56mqwe45hy3labtlq5tp7hsquoimrfgx3c2eycghukydumcoq\\",\\"ipldBlock\\":\\"0xe2a03da5b9c90f8be3d46373dc4c983ff2427d64c22470e858e62e5b25dd53ff8c7e01\\"}}}"}
\.
@@ -747,6 +625,23 @@ COPY public.multi_nonce (id, block_hash, block_number, contract_address, key0, k
\.
+--
+-- Data for Name: state; Type: TABLE DATA; Schema: public; Owner: vdbm
+--
+
+COPY public.state (id, contract_address, cid, kind, data, block_id) FROM stdin;
+1 0xD07Ed0eB708Cb7A660D22f2Ddf7b8C19c7bf1F69 bafyreiditadjoj3dtvmwely4okdvsxiqbi5wqz6f4au5avxpwaqpwdnig4 init \\xa2646d657461a4626964782a307844303745643065423730384362374136363044323266324464663762384331396337626631463639646b696e6464696e697466706172656e74a1612ff668657468426c6f636ba263636964a1612f783d626167696163677a6175786469346334373564726f6737786b3474656a66663667666a75697a693777777969357a7069377a79776c757a36716a677461636e756d1a01120f38657374617465a0 1
+\.
+
+
+--
+-- Data for Name: state_sync_status; Type: TABLE DATA; Schema: public; Owner: vdbm
+--
+
+COPY public.state_sync_status (id, latest_indexed_block_number, latest_checkpoint_block_number) FROM stdin;
+\.
+
+
--
-- Data for Name: sync_status; Type: TABLE DATA; Schema: public; Owner: vdbm
--
@@ -755,6 +650,14 @@ COPY public.sync_status (id, chain_head_block_hash, chain_head_block_number, lat
\.
+--
+-- Data for Name: typeorm_metadata; Type: TABLE DATA; Schema: public; Owner: vdbm
+--
+
+COPY public.typeorm_metadata (type, database, schema, "table", name, value) FROM stdin;
+\.
+
+
--
-- Name: _owner_id_seq; Type: SEQUENCE SET; Schema: public; Owner: vdbm
--
@@ -766,7 +669,7 @@ SELECT pg_catalog.setval('public._owner_id_seq', 1, false);
-- Name: block_progress_id_seq; Type: SEQUENCE SET; Schema: public; Owner: vdbm
--
-SELECT pg_catalog.setval('public.block_progress_id_seq', 6, true);
+SELECT pg_catalog.setval('public.block_progress_id_seq', 1, true);
--
@@ -776,46 +679,25 @@ SELECT pg_catalog.setval('public.block_progress_id_seq', 6, true);
SELECT pg_catalog.setval('public.contract_id_seq', 1, true);
---
--- Name: domain_hash_id_seq; Type: SEQUENCE SET; Schema: public; Owner: vdbm
---
-
-SELECT pg_catalog.setval('public.domain_hash_id_seq', 1, false);
-
-
--
-- Name: event_id_seq; Type: SEQUENCE SET; Schema: public; Owner: vdbm
--
-SELECT pg_catalog.setval('public.event_id_seq', 39, true);
-
-
---
--- Name: ipld_block_id_seq; Type: SEQUENCE SET; Schema: public; Owner: vdbm
---
-
-SELECT pg_catalog.setval('public.ipld_block_id_seq', 6, true);
-
-
---
--- Name: ipld_status_id_seq; Type: SEQUENCE SET; Schema: public; Owner: vdbm
---
-
-SELECT pg_catalog.setval('public.ipld_status_id_seq', 1, false);
+SELECT pg_catalog.setval('public.event_id_seq', 1, true);
--
-- Name: is_member_id_seq; Type: SEQUENCE SET; Schema: public; Owner: vdbm
--
-SELECT pg_catalog.setval('public.is_member_id_seq', 13, true);
+SELECT pg_catalog.setval('public.is_member_id_seq', 1, true);
--
-- Name: is_phisher_id_seq; Type: SEQUENCE SET; Schema: public; Owner: vdbm
--
-SELECT pg_catalog.setval('public.is_phisher_id_seq', 6, true);
+SELECT pg_catalog.setval('public.is_phisher_id_seq', 1, false);
--
@@ -832,6 +714,20 @@ SELECT pg_catalog.setval('public.is_revoked_id_seq', 1, false);
SELECT pg_catalog.setval('public.multi_nonce_id_seq', 1, false);
+--
+-- Name: state_id_seq; Type: SEQUENCE SET; Schema: public; Owner: vdbm
+--
+
+SELECT pg_catalog.setval('public.state_id_seq', 1, true);
+
+
+--
+-- Name: state_sync_status_id_seq; Type: SEQUENCE SET; Schema: public; Owner: vdbm
+--
+
+SELECT pg_catalog.setval('public.state_sync_status_id_seq', 1, false);
+
+
--
-- Name: sync_status_id_seq; Type: SEQUENCE SET; Schema: public; Owner: vdbm
--
@@ -847,14 +743,6 @@ ALTER TABLE ONLY public.contract
ADD CONSTRAINT "PK_17c3a89f58a2997276084e706e8" PRIMARY KEY (id);
---
--- Name: domain_hash PK_1b2fb63b534a5a1034c9de4af2d; Type: CONSTRAINT; Schema: public; Owner: vdbm
---
-
-ALTER TABLE ONLY public.domain_hash
- ADD CONSTRAINT "PK_1b2fb63b534a5a1034c9de4af2d" PRIMARY KEY (id);
-
-
--
-- Name: event PK_30c2f3bbaf6d34a55f8ae6e4614; Type: CONSTRAINT; Schema: public; Owner: vdbm
--
@@ -871,14 +759,6 @@ ALTER TABLE ONLY public.multi_nonce
ADD CONSTRAINT "PK_31dab24db96d04fbf687ae28b00" PRIMARY KEY (id);
---
--- Name: ipld_block PK_35d483f7d0917b68494f40066ac; Type: CONSTRAINT; Schema: public; Owner: vdbm
---
-
-ALTER TABLE ONLY public.ipld_block
- ADD CONSTRAINT "PK_35d483f7d0917b68494f40066ac" PRIMARY KEY (id);
-
-
--
-- Name: _owner PK_3ecb7a5aa92511dde29aa90a070; Type: CONSTRAINT; Schema: public; Owner: vdbm
--
@@ -887,6 +767,14 @@ ALTER TABLE ONLY public._owner
ADD CONSTRAINT "PK_3ecb7a5aa92511dde29aa90a070" PRIMARY KEY (id);
+--
+-- Name: state PK_549ffd046ebab1336c3a8030a12; Type: CONSTRAINT; Schema: public; Owner: vdbm
+--
+
+ALTER TABLE ONLY public.state
+ ADD CONSTRAINT "PK_549ffd046ebab1336c3a8030a12" PRIMARY KEY (id);
+
+
--
-- Name: is_revoked PK_578b81f9905005c7113f7bed9a3; Type: CONSTRAINT; Schema: public; Owner: vdbm
--
@@ -903,6 +791,14 @@ ALTER TABLE ONLY public.is_phisher
ADD CONSTRAINT "PK_753c1da426677f67c51cd98d35e" PRIMARY KEY (id);
+--
+-- Name: state_sync_status PK_79008eeac54c8204777451693a4; Type: CONSTRAINT; Schema: public; Owner: vdbm
+--
+
+ALTER TABLE ONLY public.state_sync_status
+ ADD CONSTRAINT "PK_79008eeac54c8204777451693a4" PRIMARY KEY (id);
+
+
--
-- Name: sync_status PK_86336482262ab8d5b548a4a71b7; Type: CONSTRAINT; Schema: public; Owner: vdbm
--
@@ -927,14 +823,6 @@ ALTER TABLE ONLY public.block_progress
ADD CONSTRAINT "PK_c01eea7890543f34821c499e874" PRIMARY KEY (id);
---
--- Name: ipld_status PK_fda882aed0a0c022b9f4fccdb1c; Type: CONSTRAINT; Schema: public; Owner: vdbm
---
-
-ALTER TABLE ONLY public.ipld_status
- ADD CONSTRAINT "PK_fda882aed0a0c022b9f4fccdb1c" PRIMARY KEY (id);
-
-
--
-- Name: IDX_00a8ca7940094d8552d67c3b72; Type: INDEX; Schema: public; Owner: vdbm
--
@@ -970,6 +858,13 @@ CREATE UNIQUE INDEX "IDX_4bbe5fb40812718baf74cc9a79" ON public.contract USING bt
CREATE UNIQUE INDEX "IDX_4c753e21652bf260667b3c1fd7" ON public.multi_nonce USING btree (block_hash, contract_address, key0, key1);
+--
+-- Name: IDX_4e2cda4bdccf560c590725a873; Type: INDEX; Schema: public; Owner: vdbm
+--
+
+CREATE UNIQUE INDEX "IDX_4e2cda4bdccf560c590725a873" ON public.state USING btree (cid);
+
+
--
-- Name: IDX_53e551bea07ca0f43c6a7a4cbb; Type: INDEX; Schema: public; Owner: vdbm
--
@@ -977,20 +872,6 @@ CREATE UNIQUE INDEX "IDX_4c753e21652bf260667b3c1fd7" ON public.multi_nonce USING
CREATE INDEX "IDX_53e551bea07ca0f43c6a7a4cbb" ON public.block_progress USING btree (block_number);
---
--- Name: IDX_560b81b666276c48e0b330c22c; Type: INDEX; Schema: public; Owner: vdbm
---
-
-CREATE UNIQUE INDEX "IDX_560b81b666276c48e0b330c22c" ON public.domain_hash USING btree (block_hash, contract_address);
-
-
---
--- Name: IDX_679fe4cab2565b7be29dcd60c7; Type: INDEX; Schema: public; Owner: vdbm
---
-
-CREATE INDEX "IDX_679fe4cab2565b7be29dcd60c7" ON public.ipld_block USING btree (block_id, contract_address);
-
-
--
-- Name: IDX_9b12e478c35b95a248a04a8fbb; Type: INDEX; Schema: public; Owner: vdbm
--
@@ -999,10 +880,10 @@ CREATE INDEX "IDX_9b12e478c35b95a248a04a8fbb" ON public.block_progress USING btr
--
--- Name: IDX_a6953a5fcd777425c6001c1898; Type: INDEX; Schema: public; Owner: vdbm
+-- Name: IDX_9b8bf5de8cfaed9e63b97340d8; Type: INDEX; Schema: public; Owner: vdbm
--
-CREATE UNIQUE INDEX "IDX_a6953a5fcd777425c6001c1898" ON public.ipld_block USING btree (cid);
+CREATE UNIQUE INDEX "IDX_9b8bf5de8cfaed9e63b97340d8" ON public.state USING btree (block_id, contract_address, kind);
--
@@ -1012,13 +893,6 @@ CREATE UNIQUE INDEX "IDX_a6953a5fcd777425c6001c1898" ON public.ipld_block USING
CREATE INDEX "IDX_ad541e3a5a00acd4d422c16ada" ON public.event USING btree (block_id, contract);
---
--- Name: IDX_b776a4314e7a73aa666ab272d7; Type: INDEX; Schema: public; Owner: vdbm
---
-
-CREATE UNIQUE INDEX "IDX_b776a4314e7a73aa666ab272d7" ON public.ipld_block USING btree (block_id, contract_address, kind);
-
-
--
-- Name: IDX_c86bf8a9f1c566350c422b7d3a; Type: INDEX; Schema: public; Owner: vdbm
--
@@ -1040,6 +914,13 @@ CREATE INDEX "IDX_d3855d762b0f9fcf9e8a707ef7" ON public.event USING btree (block
CREATE UNIQUE INDEX "IDX_d67dffa77e472e6163e619f423" ON public.is_phisher USING btree (block_hash, contract_address, key0);
+--
+-- Name: IDX_f8cc517e095dc778b3d0717728; Type: INDEX; Schema: public; Owner: vdbm
+--
+
+CREATE INDEX "IDX_f8cc517e095dc778b3d0717728" ON public.state USING btree (block_id, contract_address);
+
+
--
-- Name: event FK_2b0d35d675c4f99751855c45021; Type: FK CONSTRAINT; Schema: public; Owner: vdbm
--
@@ -1049,11 +930,11 @@ ALTER TABLE ONLY public.event
--
--- Name: ipld_block FK_6fe551100c8a6d305b9c22ac6f3; Type: FK CONSTRAINT; Schema: public; Owner: vdbm
+-- Name: state FK_460a61f455747f1b1f1614a5289; Type: FK CONSTRAINT; Schema: public; Owner: vdbm
--
-ALTER TABLE ONLY public.ipld_block
- ADD CONSTRAINT "FK_6fe551100c8a6d305b9c22ac6f3" FOREIGN KEY (block_id) REFERENCES public.block_progress(id) ON DELETE CASCADE;
+ALTER TABLE ONLY public.state
+ ADD CONSTRAINT "FK_460a61f455747f1b1f1614a5289" FOREIGN KEY (block_id) REFERENCES public.block_progress(id) ON DELETE CASCADE;
--
diff --git a/app/data/config/watcher-mobymask/mobymask-watcher.toml b/app/data/config/watcher-mobymask/mobymask-watcher.toml
index dd91cf8f..0ecf6287 100644
--- a/app/data/config/watcher-mobymask/mobymask-watcher.toml
+++ b/app/data/config/watcher-mobymask/mobymask-watcher.toml
@@ -9,8 +9,8 @@
# Checkpoint interval in number of blocks.
checkpointInterval = 2000
- # IPFS API address (can be taken from the output on running the IPFS daemon).
- # ipfsApiAddr = "/ip4/127.0.0.1/tcp/5001"
+ # Enable state creation.
+ enableState = true
# Boolean to filter logs by contract.
filterLogs = true
@@ -51,3 +51,6 @@
maxCompletionLagInSecs = 300
jobDelayInMilliSecs = 100
eventsInBatch = 50
+ blockDelayInMilliSecs = 2000
+ prefetchBlocksInMem = false
+ prefetchBlockCount = 10
diff --git a/app/data/container-build/cerc-builder-js/build-npm-package-local-dependencies.sh b/app/data/container-build/cerc-builder-js/build-npm-package-local-dependencies.sh
index 5ae0e073..446b05d1 100755
--- a/app/data/container-build/cerc-builder-js/build-npm-package-local-dependencies.sh
+++ b/app/data/container-build/cerc-builder-js/build-npm-package-local-dependencies.sh
@@ -1,7 +1,7 @@
#!/bin/bash
# Usage: build-npm-package-local-dependencies.sh <local-npm-registry-url> <package-publish-version>
# Runs build-npm-package.sh after first fixing up yarn.lock to use a local
-# npm registry for all packages in a specific scope (currently @cerc-io, @lirewine and @muknsys)
+# npm registry for all packages in a specific scope (currently @cerc-io and @lirewine)
if [ -n "$CERC_SCRIPT_DEBUG" ]; then
set -x
fi
@@ -18,7 +18,7 @@ set -e
local_npm_registry_url=$1
package_publish_version=$2
# If we need to handle an additional scope, add it to the list below:
-npm_scopes_to_handle=("@cerc-io" "@lirewine" "@muknsys")
+npm_scopes_to_handle=("@cerc-io" "@lirewine")
for npm_scope_for_local in ${npm_scopes_to_handle[@]}
do
# We need to configure the local registry
diff --git a/app/data/container-build/cerc-builder-js/build-npm-package.sh b/app/data/container-build/cerc-builder-js/build-npm-package.sh
index db27955c..375712e5 100755
--- a/app/data/container-build/cerc-builder-js/build-npm-package.sh
+++ b/app/data/container-build/cerc-builder-js/build-npm-package.sh
@@ -24,7 +24,6 @@ package_name=$( cat package.json | jq -r .name )
local_npm_registry_url=$1
npm config set @cerc-io:registry ${local_npm_registry_url}
npm config set @lirewine:registry ${local_npm_registry_url}
-npm config set @muknsys:registry ${local_npm_registry_url}
# Work around a bug in npm unpublish, which needs the url to be of the form // and not http://
local_npm_registry_url_fixed=$( echo ${local_npm_registry_url} | sed -e 's/^http[s]\{0,1\}://')
npm config set -- ${local_npm_registry_url_fixed}:_authToken ${CERC_NPM_AUTH_TOKEN}
diff --git a/app/data/container-build/cerc-eth-api-proxy/build.sh b/app/data/container-build/cerc-eth-api-proxy/build.sh
new file mode 100755
index 00000000..e21e7f9d
--- /dev/null
+++ b/app/data/container-build/cerc-eth-api-proxy/build.sh
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+# Build cerc/eth-api-proxy
+
+source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
+
+# See: https://stackoverflow.com/a/246128/1701505
+SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+
+docker build -t cerc/eth-api-proxy:local ${build_command_args} ${CERC_REPO_BASE_DIR}/eth-api-proxy
diff --git a/app/data/container-build/cerc-fixturenet-eth-genesis/genesis/accounts/mnemonic_to_csv.py b/app/data/container-build/cerc-fixturenet-eth-genesis/genesis/accounts/mnemonic_to_csv.py
index 365c3775..1e6d10f4 100644
--- a/app/data/container-build/cerc-fixturenet-eth-genesis/genesis/accounts/mnemonic_to_csv.py
+++ b/app/data/container-build/cerc-fixturenet-eth-genesis/genesis/accounts/mnemonic_to_csv.py
@@ -1,5 +1,4 @@
from web3.auto import w3
-import json
import ruamel.yaml as yaml
import sys
diff --git a/app/data/container-build/cerc-go-nitro/build.sh b/app/data/container-build/cerc-go-nitro/build.sh
new file mode 100755
index 00000000..0637f46d
--- /dev/null
+++ b/app/data/container-build/cerc-go-nitro/build.sh
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+# Build cerc/go-nitro
+
+source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
+
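+# Build using the Dockerfile shipped in the go-nitro repo (docker/local/Dockerfile)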
+docker build -t cerc/go-nitro:local -f ${CERC_REPO_BASE_DIR}/go-nitro/docker/local/Dockerfile ${build_command_args} ${CERC_REPO_BASE_DIR}/go-nitro
diff --git a/app/data/container-build/cerc-keycloak-reg-api/build.sh b/app/data/container-build/cerc-keycloak-reg-api/build.sh
new file mode 100755
index 00000000..c591c2f0
--- /dev/null
+++ b/app/data/container-build/cerc-keycloak-reg-api/build.sh
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+# Build cerc/keycloak-reg-api
+
+source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
+
+# See: https://stackoverflow.com/a/246128/1701505
+SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+
+docker build -t cerc/keycloak-reg-api:local ${build_command_args} ${CERC_REPO_BASE_DIR}/keycloak-reg-api
diff --git a/app/data/container-build/cerc-keycloak-reg-ui/build.sh b/app/data/container-build/cerc-keycloak-reg-ui/build.sh
new file mode 100755
index 00000000..3124dae6
--- /dev/null
+++ b/app/data/container-build/cerc-keycloak-reg-ui/build.sh
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+# Build cerc/keycloak-reg-ui
+
+source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
+
+# See: https://stackoverflow.com/a/246128/1701505
+SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+
+docker build -t cerc/keycloak-reg-ui:local ${build_command_args} ${CERC_REPO_BASE_DIR}/keycloak-reg-ui
diff --git a/app/data/container-build/cerc-laconic-console-host/Dockerfile b/app/data/container-build/cerc-laconic-console-host/Dockerfile
index 7e3fc46b..95ddff94 100644
--- a/app/data/container-build/cerc-laconic-console-host/Dockerfile
+++ b/app/data/container-build/cerc-laconic-console-host/Dockerfile
@@ -1,69 +1,15 @@
-# Originally from: https://github.com/devcontainers/images/blob/main/src/javascript-node/.devcontainer/Dockerfile
-# [Choice] Node.js version (use -bullseye variants on local arm64/Apple Silicon): 18, 16, 14, 18-bullseye, 16-bullseye, 14-bullseye, 18-buster, 16-buster, 14-buster
-ARG VARIANT=18-bullseye
-FROM node:${VARIANT}
-
-ARG USERNAME=node
-ARG NPM_GLOBAL=/usr/local/share/npm-global
+FROM cerc/webapp-base:local
# This container pulls npm packages from a local registry configured via these env vars
ARG CERC_NPM_REGISTRY_URL
ARG CERC_NPM_AUTH_TOKEN
-# Add NPM global to PATH.
-ENV PATH=${NPM_GLOBAL}/bin:${PATH}
-# Prevents npm from printing version warnings
-ENV NPM_CONFIG_UPDATE_NOTIFIER=false
-
-RUN \
- # Configure global npm install location, use group to adapt to UID/GID changes
- if ! cat /etc/group | grep -e "^npm:" > /dev/null 2>&1; then groupadd -r npm; fi \
- && usermod -a -G npm ${USERNAME} \
- && umask 0002 \
- && mkdir -p ${NPM_GLOBAL} \
- && touch /usr/local/etc/npmrc \
- && chown ${USERNAME}:npm ${NPM_GLOBAL} /usr/local/etc/npmrc \
- && chmod g+s ${NPM_GLOBAL} \
- && npm config -g set prefix ${NPM_GLOBAL} \
- && su ${USERNAME} -c "npm config -g set prefix ${NPM_GLOBAL}" \
- # Install eslint
- && su ${USERNAME} -c "umask 0002 && npm install -g eslint" \
- && npm cache clean --force > /dev/null 2>&1
-
-# [Optional] Uncomment this section to install additional OS packages.
-RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
- && apt-get -y install --no-install-recommends jq
-
-# [Optional] Uncomment if you want to install an additional version of node using nvm
-# ARG EXTRA_NODE_VERSION=10
-# RUN su node -c "source /usr/local/share/nvm/nvm.sh && nvm install ${EXTRA_NODE_VERSION}"
-
-# We do this to get a yq binary from the published container, for the correct architecture we're building here
-COPY --from=docker.io/mikefarah/yq:latest /usr/bin/yq /usr/local/bin/yq
-
-RUN mkdir -p /scripts
-COPY ./apply-webapp-config.sh /scripts
-COPY ./start-serving-app.sh /scripts
-
-# [Optional] Uncomment if you want to install more global node modules
-# RUN su node -c "npm install -g "
-
# Configure the local npm registry
RUN npm config set @cerc-io:registry ${CERC_NPM_REGISTRY_URL} \
&& npm config set @lirewine:registry ${CERC_NPM_REGISTRY_URL} \
&& npm config set -- ${CERC_NPM_REGISTRY_URL}:_authToken ${CERC_NPM_AUTH_TOKEN}
-RUN mkdir -p /config
-COPY ./config.yml /config
-
-# Install simple web server for now (use nginx perhaps later)
-RUN yarn global add http-server
-
# Globally install the payload web app package
RUN yarn global add @cerc-io/console-app
-# Expose port for http
-EXPOSE 80
-
-# Default command sleeps forever so docker doesn't kill it
-CMD ["/scripts/start-serving-app.sh"]
+COPY ./config.yml /config
diff --git a/app/data/container-build/cerc-laconic-console-host/start-serving-app.sh b/app/data/container-build/cerc-laconic-console-host/start-serving-app.sh
deleted file mode 100755
index a322e5fb..00000000
--- a/app/data/container-build/cerc-laconic-console-host/start-serving-app.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env bash
-if [ -n "$CERC_SCRIPT_DEBUG" ]; then
- set -x
-fi
-# TODO: Don't hard wire this:
-webapp_files_dir=/usr/local/share/.config/yarn/global/node_modules/@cerc-io/console-app/dist/production
-/scripts/apply-webapp-config.sh /config/config.yml ${webapp_files_dir}
-http-server -p 80 ${webapp_files_dir}
diff --git a/app/data/container-build/cerc-mobymask-snap/Dockerfile b/app/data/container-build/cerc-mobymask-snap/Dockerfile
new file mode 100644
index 00000000..f80969c3
--- /dev/null
+++ b/app/data/container-build/cerc-mobymask-snap/Dockerfile
@@ -0,0 +1,13 @@
+FROM node:18.15.0-alpine3.16
+
+RUN apk --update --no-cache add git python3 alpine-sdk bash
+
+WORKDIR /app
+
+COPY . .
+
+RUN echo "Installing dependencies..." && \
+ yarn install && \
+ cd packages/snap
+
+CMD ["bash", "-c", "yarn start"]
diff --git a/app/data/container-build/cerc-mobymask-snap/build.sh b/app/data/container-build/cerc-mobymask-snap/build.sh
new file mode 100755
index 00000000..c82f96ca
--- /dev/null
+++ b/app/data/container-build/cerc-mobymask-snap/build.sh
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+# Build cerc/mobymask-snap
+
+source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
+SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+
+docker build -t cerc/mobymask-snap:local -f ${SCRIPT_DIR}/Dockerfile ${build_command_args} ${CERC_REPO_BASE_DIR}/mobymask-snap
diff --git a/app/data/container-build/cerc-mobymask-ui/Dockerfile b/app/data/container-build/cerc-mobymask-ui/Dockerfile
index 56e72a7a..e00ac663 100644
--- a/app/data/container-build/cerc-mobymask-ui/Dockerfile
+++ b/app/data/container-build/cerc-mobymask-ui/Dockerfile
@@ -1,6 +1,6 @@
# Originally from: https://github.com/devcontainers/images/blob/main/src/javascript-node/.devcontainer/Dockerfile
# [Choice] Node.js version (use -bullseye variants on local arm64/Apple Silicon): 18, 16, 14, 18-bullseye, 16-bullseye, 14-bullseye, 18-buster, 16-buster, 14-buster
-ARG VARIANT=16-bullseye
+ARG VARIANT=18-bullseye
FROM node:${VARIANT}
ARG USERNAME=node
@@ -37,7 +37,7 @@ RUN yarn global add http-server
WORKDIR /app
COPY . .
-RUN npm install
+RUN yarn install
# Expose port for http
EXPOSE 80
diff --git a/app/data/container-build/cerc-nitro-contracts/Dockerfile b/app/data/container-build/cerc-nitro-contracts/Dockerfile
new file mode 100644
index 00000000..23c95886
--- /dev/null
+++ b/app/data/container-build/cerc-nitro-contracts/Dockerfile
@@ -0,0 +1,12 @@
+FROM node:18.17.1-alpine3.18
+
+RUN apk --update --no-cache add python3 alpine-sdk bash curl jq
+
+WORKDIR /app
+
+COPY . .
+
+RUN echo "Installing dependencies" && \
+ yarn
+
+WORKDIR /app/packages/nitro-util
diff --git a/app/data/container-build/cerc-nitro-contracts/build.sh b/app/data/container-build/cerc-nitro-contracts/build.sh
new file mode 100755
index 00000000..c0aa770f
--- /dev/null
+++ b/app/data/container-build/cerc-nitro-contracts/build.sh
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+# Build cerc/nitro-contracts
+
+source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
+
+# See: https://stackoverflow.com/a/246128/1701505
+SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+
+docker build -t cerc/nitro-contracts:local -f ${SCRIPT_DIR}/Dockerfile ${build_command_args} ${CERC_REPO_BASE_DIR}/ts-nitro
diff --git a/app/data/container-build/cerc-plugeth-statediff/build.sh b/app/data/container-build/cerc-plugeth-statediff/build.sh
index f1a9ffd5..15a77ef6 100755
--- a/app/data/container-build/cerc-plugeth-statediff/build.sh
+++ b/app/data/container-build/cerc-plugeth-statediff/build.sh
@@ -1,4 +1,10 @@
#!/usr/bin/env bash
# Build cerc/plugeth-statediff
source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
-docker build -t cerc/plugeth-statediff:local ${build_command_args} ${CERC_REPO_BASE_DIR}/plugeth-statediff
+# This container build currently requires access to private dependencies in Gitea,
+# so we check that the necessary access token has been supplied here, then pass it on to the build
+if [[ -z "${CERC_GO_AUTH_TOKEN}" ]]; then
+ echo "ERROR: CERC_GO_AUTH_TOKEN is not set" >&2
+ exit 1
+fi
+docker build -t cerc/plugeth-statediff:local ${build_command_args} --build-arg GIT_VDBTO_TOKEN=${CERC_GO_AUTH_TOKEN} ${CERC_REPO_BASE_DIR}/plugeth-statediff
diff --git a/app/data/container-build/cerc-plugeth/build.sh b/app/data/container-build/cerc-plugeth/build.sh
index 7c778877..1162a192 100755
--- a/app/data/container-build/cerc-plugeth/build.sh
+++ b/app/data/container-build/cerc-plugeth/build.sh
@@ -1,4 +1,10 @@
#!/usr/bin/env bash
# Build cerc/plugeth
source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
-docker build -t cerc/plugeth:local ${build_command_args} ${CERC_REPO_BASE_DIR}/plugeth
+# This container build currently requires access to private dependencies in Gitea,
+# so we check that the necessary access token has been supplied here, then pass it on to the build
+if [[ -z "${CERC_GO_AUTH_TOKEN}" ]]; then
+ echo "ERROR: CERC_GO_AUTH_TOKEN is not set" >&2
+ exit 1
+fi
+docker build -t cerc/plugeth:local ${build_command_args} --build-arg GIT_VDBTO_TOKEN=${CERC_GO_AUTH_TOKEN} ${CERC_REPO_BASE_DIR}/plugeth
diff --git a/app/data/container-build/cerc-ponder/Dockerfile b/app/data/container-build/cerc-ponder/Dockerfile
new file mode 100644
index 00000000..ff247101
--- /dev/null
+++ b/app/data/container-build/cerc-ponder/Dockerfile
@@ -0,0 +1,13 @@
+FROM node:18.15.0-alpine3.16
+
+RUN apk --update --no-cache add git alpine-sdk bash jq curl
+RUN curl -L https://unpkg.com/@pnpm/self-installer | node
+
+WORKDIR /app
+
+COPY . .
+
+RUN echo "Installing dependencies and building..." && \
+ pnpm install && pnpm build && \
+ cd examples/token-erc20 && \
+ pnpm install
diff --git a/app/data/container-build/cerc-ponder/build.sh b/app/data/container-build/cerc-ponder/build.sh
new file mode 100755
index 00000000..fa3e44c0
--- /dev/null
+++ b/app/data/container-build/cerc-ponder/build.sh
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+
+# Build cerc/ponder
+source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
+SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+
+docker build -t cerc/ponder:local -f ${SCRIPT_DIR}/Dockerfile ${build_command_args} ${CERC_REPO_BASE_DIR}/ponder
diff --git a/app/data/container-build/cerc-watcher-mobymask-v3/Dockerfile b/app/data/container-build/cerc-watcher-mobymask-v3/Dockerfile
new file mode 100644
index 00000000..3e07eec9
--- /dev/null
+++ b/app/data/container-build/cerc-watcher-mobymask-v3/Dockerfile
@@ -0,0 +1,20 @@
+FROM ubuntu:22.04
+
+RUN apt-get update \
+ && apt-get install -y curl wget gnupg build-essential \
+ && curl --silent --location https://deb.nodesource.com/setup_18.x | bash - \
+ && apt-get update \
+ && apt-get install -y nodejs git busybox jq \
+ && node -v
+
+RUN corepack enable \
+ && yarn --version
+
+WORKDIR /app
+
+COPY . .
+
+RUN echo "Building mobymask-v2-watcher-ts" && \
+ yarn && yarn build
+
+WORKDIR /app
diff --git a/app/data/container-build/cerc-watcher-mobymask-v3/build.sh b/app/data/container-build/cerc-watcher-mobymask-v3/build.sh
new file mode 100755
index 00000000..1c26a4ce
--- /dev/null
+++ b/app/data/container-build/cerc-watcher-mobymask-v3/build.sh
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+# Build cerc/watcher-mobymask-v3
+
+source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
+
+# See: https://stackoverflow.com/a/246128/1701505
+SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+
+docker build -t cerc/watcher-mobymask-v3:local -f ${SCRIPT_DIR}/Dockerfile ${build_command_args} ${CERC_REPO_BASE_DIR}/mobymask-v2-watcher-ts
diff --git a/app/data/container-build/cerc-watcher-mobymask/Dockerfile b/app/data/container-build/cerc-watcher-mobymask/Dockerfile
index 7994237a..a0fb4af2 100644
--- a/app/data/container-build/cerc-watcher-mobymask/Dockerfile
+++ b/app/data/container-build/cerc-watcher-mobymask/Dockerfile
@@ -1,5 +1,5 @@
# TODO: move this into the cerc-io/mobymask-watcher repo
-FROM node:16.17.1-alpine3.16
+FROM node:18.17.1-alpine3.18
RUN apk --update --no-cache add git python3 alpine-sdk
@@ -7,8 +7,5 @@ WORKDIR /app
COPY . .
-RUN echo "Building watcher-ts" && \
- git checkout v0.2.19 && \
+RUN echo "Building mobymask-watcher-ts" && \
yarn && yarn build
-
-WORKDIR /app/packages/mobymask-watcher
diff --git a/app/data/container-build/cerc-watcher-mobymask/build.sh b/app/data/container-build/cerc-watcher-mobymask/build.sh
index 219b864f..2802ce70 100755
--- a/app/data/container-build/cerc-watcher-mobymask/build.sh
+++ b/app/data/container-build/cerc-watcher-mobymask/build.sh
@@ -6,6 +6,6 @@ source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
# See: https://stackoverflow.com/a/246128/1701505
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
-docker build -t cerc/watcher-mobymask:local -f ${SCRIPT_DIR}/Dockerfile ${build_command_args} ${CERC_REPO_BASE_DIR}/watcher-ts
+docker build -t cerc/watcher-mobymask:local -f ${SCRIPT_DIR}/Dockerfile ${build_command_args} ${CERC_REPO_BASE_DIR}/mobymask-watcher-ts
# TODO: add a mechanism to pass two repos into a container rather than the parent directory
diff --git a/app/data/container-build/cerc-webapp-base/Dockerfile b/app/data/container-build/cerc-webapp-base/Dockerfile
new file mode 100644
index 00000000..275a5c3c
--- /dev/null
+++ b/app/data/container-build/cerc-webapp-base/Dockerfile
@@ -0,0 +1,57 @@
+# Originally from: https://github.com/devcontainers/images/blob/main/src/javascript-node/.devcontainer/Dockerfile
+# [Choice] Node.js version (use -bullseye variants on local arm64/Apple Silicon): 18, 16, 14, 18-bullseye, 16-bullseye, 14-bullseye, 18-buster, 16-buster, 14-buster
+ARG VARIANT=18-bullseye
+FROM node:${VARIANT}
+
+ARG USERNAME=node
+ARG NPM_GLOBAL=/usr/local/share/npm-global
+
+# Add NPM global to PATH.
+ENV PATH=${NPM_GLOBAL}/bin:${PATH}
+# Prevents npm from printing version warnings
+ENV NPM_CONFIG_UPDATE_NOTIFIER=false
+
+RUN \
+ # Configure global npm install location, use group to adapt to UID/GID changes
+ if ! cat /etc/group | grep -e "^npm:" > /dev/null 2>&1; then groupadd -r npm; fi \
+ && usermod -a -G npm ${USERNAME} \
+ && umask 0002 \
+ && mkdir -p ${NPM_GLOBAL} \
+ && touch /usr/local/etc/npmrc \
+ && chown ${USERNAME}:npm ${NPM_GLOBAL} /usr/local/etc/npmrc \
+ && chmod g+s ${NPM_GLOBAL} \
+ && npm config -g set prefix ${NPM_GLOBAL} \
+ && su ${USERNAME} -c "npm config -g set prefix ${NPM_GLOBAL}" \
+ # Install eslint
+ && su ${USERNAME} -c "umask 0002 && npm install -g eslint" \
+ && npm cache clean --force > /dev/null 2>&1
+
+# [Optional] Uncomment this section to install additional OS packages.
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
+ && apt-get -y install --no-install-recommends jq
+
+# [Optional] Uncomment if you want to install an additional version of node using nvm
+# ARG EXTRA_NODE_VERSION=10
+# RUN su node -c "source /usr/local/share/nvm/nvm.sh && nvm install ${EXTRA_NODE_VERSION}"
+
+# We do this to get a yq binary from the published container, for the correct architecture we're building here
+COPY --from=docker.io/mikefarah/yq:latest /usr/bin/yq /usr/local/bin/yq
+
+RUN mkdir -p /scripts
+COPY ./apply-webapp-config.sh /scripts
+COPY ./start-serving-app.sh /scripts
+
+# [Optional] Uncomment if you want to install more global node modules
+# RUN su node -c "npm install -g "
+
+RUN mkdir -p /config
+COPY ./config.yml /config
+
+# Install simple web server for now (use nginx perhaps later)
+RUN yarn global add http-server
+
+# Expose port for http
+EXPOSE 80
+
+# Default command sleeps forever so docker doesn't kill it
+CMD ["/scripts/start-serving-app.sh"]
diff --git a/app/data/container-build/cerc-laconic-console-host/apply-webapp-config.sh b/app/data/container-build/cerc-webapp-base/apply-webapp-config.sh
similarity index 89%
rename from app/data/container-build/cerc-laconic-console-host/apply-webapp-config.sh
rename to app/data/container-build/cerc-webapp-base/apply-webapp-config.sh
index bf041708..6d366805 100755
--- a/app/data/container-build/cerc-laconic-console-host/apply-webapp-config.sh
+++ b/app/data/container-build/cerc-webapp-base/apply-webapp-config.sh
@@ -18,7 +18,7 @@ if ! [[ -d ${webapp_files_dir} ]]; then
fi
# First some magic using yq to translate our yaml config file into an array of key value pairs like:
# LACONIC_HOSTED_CONFIG_=
-readarray -t config_kv_pair_array < <( yq '.. | select(length > 2) | ([path | join("_"), .] | join("=") )' ${config_file_name} | sed 's/^/LACONIC_HOSTED_CONFIG_/' )
+readarray -t config_kv_pair_array < <( yq '.. | ([path | join("_"), .] | join("=") )' ${config_file_name} | sort -r | sed -e '$ d' | sed 's/^/LACONIC_HOSTED_CONFIG_/' )
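+# e.g. a config file entry like "webapp: { url: http://localhost:3000 }" (hypothetical keys) would yield:
+#   LACONIC_HOSTED_CONFIG_webapp_url=http://localhost:3000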
declare -p config_kv_pair_array
# Then iterate over that kv array making the template substitution in our web app files
for kv_pair_string in "${config_kv_pair_array[@]}"
diff --git a/app/data/container-build/cerc-webapp-base/build.sh b/app/data/container-build/cerc-webapp-base/build.sh
new file mode 100755
index 00000000..51712dc4
--- /dev/null
+++ b/app/data/container-build/cerc-webapp-base/build.sh
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+# Build cerc/laconic-registry-cli
+
+source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
+
+# See: https://stackoverflow.com/a/246128/1701505
+SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+
+docker build -t cerc/webapp-base:local ${build_command_args} -f ${SCRIPT_DIR}/Dockerfile ${SCRIPT_DIR}
diff --git a/app/data/container-build/cerc-webapp-base/config.yml b/app/data/container-build/cerc-webapp-base/config.yml
new file mode 100644
index 00000000..c69b6752
--- /dev/null
+++ b/app/data/container-build/cerc-webapp-base/config.yml
@@ -0,0 +1 @@
+# Put config here.
diff --git a/app/data/container-build/cerc-webapp-base/start-serving-app.sh b/app/data/container-build/cerc-webapp-base/start-serving-app.sh
new file mode 100755
index 00000000..69fa6c22
--- /dev/null
+++ b/app/data/container-build/cerc-webapp-base/start-serving-app.sh
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+if [ -n "$CERC_SCRIPT_DEBUG" ]; then
+ set -x
+fi
+
+CERC_WEBAPP_FILES_DIR="${CERC_WEBAPP_FILES_DIR:-/data}"
+
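+# Apply the LACONIC_HOSTED_CONFIG_* substitutions from config.yml to the webapp files, then serve them on port 80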
+/scripts/apply-webapp-config.sh /config/config.yml ${CERC_WEBAPP_FILES_DIR}
+http-server -p 80 ${CERC_WEBAPP_FILES_DIR}
diff --git a/app/data/container-image-list.txt b/app/data/container-image-list.txt
index b674baf8..780bda53 100644
--- a/app/data/container-image-list.txt
+++ b/app/data/container-image-list.txt
@@ -49,3 +49,9 @@ cerc/sushiswap-v3-periphery
cerc/watcher-sushiswap
cerc/graph-node
cerc/sushiswap-subgraphs
+cerc/webapp-base
+cerc/watcher-mobymask-v3
+cerc/go-nitro
+cerc/nitro-contracts
+cerc/mobymask-snap
+cerc/ponder
diff --git a/app/data/pod-list.txt b/app/data/pod-list.txt
index c6e55b8d..ed7f17d5 100644
--- a/app/data/pod-list.txt
+++ b/app/data/pod-list.txt
@@ -32,3 +32,13 @@ lasso
reth
watcher-sushiswap
contract-sushiswap
+graph-node
+sushiswap-subgraph-v3
+fixturenet-sushiswap-subgraph-v3
+watcher-mobymask-v3
+mobymask-app-v3
+go-nitro
+nitro-contracts
+nitro-reverse-payment-proxy
+mobymask-snap
+ponder
diff --git a/app/data/repository-list.txt b/app/data/repository-list.txt
index de696335..d41074c1 100644
--- a/app/data/repository-list.txt
+++ b/app/data/repository-list.txt
@@ -10,7 +10,7 @@ github.com/cerc-io/laconicd
github.com/cerc-io/laconic-sdk
github.com/cerc-io/laconic-registry-cli
github.com/cerc-io/laconic-console
-github.com/cerc-io/mobymask-watcher
+github.com/cerc-io/mobymask-watcher-ts
github.com/cerc-io/watcher-ts
github.com/cerc-io/mobymask-v2-watcher-ts
github.com/cerc-io/MobyMask
@@ -43,3 +43,7 @@ github.com/cerc-io/sushiswap-v3-core
github.com/cerc-io/sushiswap-v3-periphery
github.com/graphprotocol/graph-node
github.com/sushiswap/subgraphs
+github.com/cerc-io/go-nitro
+github.com/cerc-io/ts-nitro
+github.com/cerc-io/mobymask-snap
+github.com/cerc-io/ponder
diff --git a/app/data/stacks/azimuth/stack.yml b/app/data/stacks/azimuth/stack.yml
index bb7f2a88..47e0d058 100644
--- a/app/data/stacks/azimuth/stack.yml
+++ b/app/data/stacks/azimuth/stack.yml
@@ -1,7 +1,7 @@
version: "1.0"
name: azimuth
repos:
- - github.com/cerc-io/azimuth-watcher-ts
+ - github.com/cerc-io/azimuth-watcher-ts@v0.1.1
containers:
- cerc/watcher-azimuth
pods:
diff --git a/app/data/stacks/build-support/stack.yml b/app/data/stacks/build-support/stack.yml
index e0962515..4544a8c6 100644
--- a/app/data/stacks/build-support/stack.yml
+++ b/app/data/stacks/build-support/stack.yml
@@ -1,5 +1,5 @@
version: "1.2"
name: build-support
-decription: "Build Support Components"
+description: "Build Support Components"
containers:
- cerc/builder-js
diff --git a/app/data/stacks/chain-chunker/stack.yml b/app/data/stacks/chain-chunker/stack.yml
index 2d36330b..318afd07 100644
--- a/app/data/stacks/chain-chunker/stack.yml
+++ b/app/data/stacks/chain-chunker/stack.yml
@@ -1,6 +1,6 @@
version: "1.0"
name: chain-chunker
-decription: "Stack to build containers for chain-chunker"
+description: "Stack to build containers for chain-chunker"
repos:
- github.com/cerc-io/ipld-eth-state-snapshot@v5
- github.com/cerc-io/eth-statediff-service@v5
diff --git a/app/data/stacks/fixturenet-eth-loaded/stack.yml b/app/data/stacks/fixturenet-eth-loaded/stack.yml
index 7f6684d1..b8ae5681 100644
--- a/app/data/stacks/fixturenet-eth-loaded/stack.yml
+++ b/app/data/stacks/fixturenet-eth-loaded/stack.yml
@@ -1,6 +1,6 @@
version: "1.0"
name: fixturenet-eth-loaded
-decription: "Loaded Ethereum Fixturenet"
+description: "Loaded Ethereum Fixturenet"
repos:
- github.com/cerc-io/go-ethereum
- github.com/cerc-io/tx-spammer
diff --git a/app/data/stacks/fixturenet-eth-tx/stack.yml b/app/data/stacks/fixturenet-eth-tx/stack.yml
index f8cdcfed..44cf1760 100644
--- a/app/data/stacks/fixturenet-eth-tx/stack.yml
+++ b/app/data/stacks/fixturenet-eth-tx/stack.yml
@@ -1,6 +1,6 @@
version: "1.2"
name: fixturenet-eth-tx
-decription: "Ethereum Fixturenet w/ tx-spammer"
+description: "Ethereum Fixturenet w/ tx-spammer"
repos:
- github.com/cerc-io/go-ethereum
- github.com/cerc-io/tx-spammer
diff --git a/app/data/stacks/fixturenet-eth/stack.yml b/app/data/stacks/fixturenet-eth/stack.yml
index 5432e6d3..8790f36e 100644
--- a/app/data/stacks/fixturenet-eth/stack.yml
+++ b/app/data/stacks/fixturenet-eth/stack.yml
@@ -1,6 +1,6 @@
version: "1.1"
name: fixturenet-eth
-decription: "Ethereum Fixturenet"
+description: "Ethereum Fixturenet"
repos:
- github.com/cerc-io/go-ethereum
- github.com/cerc-io/lighthouse
diff --git a/app/data/stacks/fixturenet-graph-node/README.md b/app/data/stacks/fixturenet-graph-node/README.md
deleted file mode 100644
index 6e010b50..00000000
--- a/app/data/stacks/fixturenet-graph-node/README.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# Graph-Node Fixturenet
-
-Experimental
\ No newline at end of file
diff --git a/app/data/stacks/fixturenet-graph-node/stack.yml b/app/data/stacks/fixturenet-graph-node/stack.yml
deleted file mode 100644
index e7ab10f3..00000000
--- a/app/data/stacks/fixturenet-graph-node/stack.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-version: "1.0"
-name: fixturenet-graph-node
-description: "A graph-node fixturenet"
-repos:
- - github.com/filecoin-project/lotus
- - github.com/graphprotocol/graph-node
-containers:
- - cerc/lotus
- - cerc/graph-node
-pods:
- - fixturenet-lotus
- - fixturenet-graph-node
diff --git a/app/data/stacks/fixturenet-laconic-loaded/README.md b/app/data/stacks/fixturenet-laconic-loaded/README.md
index b94189c2..c82e48ad 100644
--- a/app/data/stacks/fixturenet-laconic-loaded/README.md
+++ b/app/data/stacks/fixturenet-laconic-loaded/README.md
@@ -4,6 +4,8 @@ Testing a "Loaded" fixturenet with console.
Instructions for deploying a local Laconic blockchain "fixturenet" for development and testing purposes using laconic-stack-orchestrator.
+**Note:** For building some NPMs, access to the @lirewine repositories is required. If you don't have access, see [this tutorial](/docs/laconicd-fixturenet.md) to run this stack.
+
## 1. Install Laconic Stack Orchestrator
Installation is covered in detail [here](https://github.com/cerc-io/stack-orchestrator#user-mode) but if you're on Linux and already have docker installed it should be as simple as:
```
@@ -30,24 +32,24 @@ $ export CERC_NPM_AUTH_TOKEN=
## 3. Clone required repositories
```
-$ laconic-so --stack fixturenet-laconicd setup-repositories
+$ laconic-so --stack fixturenet-laconic-loaded setup-repositories
```
## 4. Build the stack's packages and containers
```
-$ laconic-so --stack fixturenet-laconicd build-npms
-$ laconic-so --stack fixturenet-laconicd build-containers
+$ laconic-so --stack fixturenet-laconic-loaded build-npms
+$ laconic-so --stack fixturenet-laconic-loaded build-containers
```
## 5. Deploy the stack
```
-$ laconic-so --stack fixturenet-laconicd deploy up
+$ laconic-so --stack fixturenet-laconic-loaded deploy up
```
Correct operation should be verified by checking the laconicd container's logs with:
```
-$ laconic-so --stack fixturenet-laconicd deploy logs
+$ laconic-so --stack fixturenet-laconic-loaded deploy logs
```
## 6. Test with the Registry CLI
```
-$ laconic-so --stack fixturenet-laconicd deploy exec cli "laconic cns status"
+$ laconic-so --stack fixturenet-laconic-loaded deploy exec cli "laconic cns status"
```
## 7. View the laconic console
Get the URL for the console web app with this command (the port number will be different for each deployment):
diff --git a/app/data/stacks/fixturenet-laconic-loaded/stack.yml b/app/data/stacks/fixturenet-laconic-loaded/stack.yml
index 3102978a..523a7091 100644
--- a/app/data/stacks/fixturenet-laconic-loaded/stack.yml
+++ b/app/data/stacks/fixturenet-laconic-loaded/stack.yml
@@ -21,6 +21,7 @@ npms:
containers:
- cerc/laconicd
- cerc/laconic-registry-cli
+ - cerc/webapp-base
- cerc/laconic-console-host
pods:
- fixturenet-laconicd
diff --git a/app/data/stacks/fixturenet-optimism/README.md b/app/data/stacks/fixturenet-optimism/README.md
index c083ee68..4d933f83 100644
--- a/app/data/stacks/fixturenet-optimism/README.md
+++ b/app/data/stacks/fixturenet-optimism/README.md
@@ -18,6 +18,11 @@ Build the container images:
```bash
laconic-so --stack fixturenet-optimism build-containers
+
+# If redeploying with changes in the stack containers
+laconic-so --stack fixturenet-optimism build-containers --force-rebuild
+
+# If errors are thrown during the build, the old images used by this stack may need to be deleted
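+
+# e.g. (image name here is illustrative):
+# docker image rm cerc/<image-name>:local
+# docker image prune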
```
Note: this will take >10 mins depending on the specs of your machine, and **requires** 16GB of memory or greater.
diff --git a/app/data/stacks/fixturenet-optimism/stack.yml b/app/data/stacks/fixturenet-optimism/stack.yml
index 00ccd7fc..9cd4d2bc 100644
--- a/app/data/stacks/fixturenet-optimism/stack.yml
+++ b/app/data/stacks/fixturenet-optimism/stack.yml
@@ -1,6 +1,6 @@
version: "1.0"
name: fixturenet-optimism
-decription: "Optimism Fixturenet"
+description: "Optimism Fixturenet"
repos:
- github.com/cerc-io/go-ethereum
- github.com/cerc-io/lighthouse
diff --git a/app/data/stacks/fixturenet-payments/.env.fixturenet b/app/data/stacks/fixturenet-payments/.env.fixturenet
new file mode 100644
index 00000000..3ebaa13f
--- /dev/null
+++ b/app/data/stacks/fixturenet-payments/.env.fixturenet
@@ -0,0 +1,25 @@
+# Required for:
+# Nitro contracts deployment
+# MobyMask contract deployment
+CERC_PRIVATE_KEY_DEPLOYER="0x888814df89c4358d7ddb3fa4b0213e7331239a80e1f013eaa7b2deca2a41a218"
+
+# ipld-eth-server's go-nitro node credentials
+NITRO_PK=2d999770f7b5d49b694080f987b82bbc9fc9ac2b4dcc10b0f8aba7d700f69c6d
+NITRO_CHAIN_PK=570b909da9669b2f35a0b1ac70b8358516d55ae1b5b3710e95e9a94395090597
+
+# Watcher's nitro node credentials
+CERC_WATCHER_NITRO_PK="0279651921cd800ac560c21ceea27aab0107b67daf436cdd25ce84cad30159b4"
+
+# Used for sending MobyMask chain txs; also serves as chain pk for watcher's nitro node
+CERC_PRIVATE_KEY_PEER="111b7500bdce494d6f4bcfe8c2a0dde2ef92f751d9070fac6475dbd6d8021b3f"
+
+# Ponder app's nitro node credentials
+CERC_PONDER_NITRO_PK=58368d20ff12f17669c06158c21d885897aa56f9be430edc789614bf9851d53f
+CERC_PONDER_NITRO_CHAIN_PK=fb1e9af328c283ca3e2486e7c24d13582b7912057d8b9542ff41503c85bc05c0
+
+# Used by watcher and ponder app for sending upstream payments
+CERC_UPSTREAM_NITRO_ADDRESS="0xAAA6628Ec44A8a742987EF3A114dDFE2D4F7aDCE" # corresponds to NITRO_PK
+CERC_UPSTREAM_NITRO_MULTIADDR="/dns4/go-nitro/tcp/5005/ws/p2p/16Uiu2HAmSjXJqsyBJgcBUU2HQmykxGseafSatbpq5471XmuaUqyv"
+
+# Used by the MobyMask app to make payments to the watcher
+CERC_PAYMENT_NITRO_ADDRESS="0xBBB676f9cFF8D242e9eaC39D063848807d3D1D94" # corresponds to CERC_WATCHER_NITRO_PK
diff --git a/app/data/stacks/fixturenet-payments/README.md b/app/data/stacks/fixturenet-payments/README.md
new file mode 100644
index 00000000..2525b6d6
--- /dev/null
+++ b/app/data/stacks/fixturenet-payments/README.md
@@ -0,0 +1,70 @@
+# fixturenet-payments
+
+## Setup
+
+Clone required repositories:
+
+```bash
+laconic-so --stack fixturenet-payments setup-repositories --pull
+```
+
+Build the container images:
+
+```bash
+laconic-so --stack fixturenet-payments build-containers
+```
+
+## Deploy
+
+### Configuration
+
+Deploy the stack:
+
+```bash
+laconic-so --stack fixturenet-payments deploy --cluster payments up
+
+# Exposed on host ports:
+# 5005: go-nitro node's p2p msg port
+# 8081: reverse payment proxy's RPC endpoint
+# 15432: MobyMask v3 watcher's db endpoint
+# 3001: MobyMask v3 watcher endpoint
+# 9090: MobyMask v3 watcher relay node endpoint
+# 8080: MobyMask snap
+# 3004: MobyMask v3 app
+```
+
+Check the logs of the MobyMask contract deployment container to get the deployed contract's address and the generated root invite link:
+
+```bash
+docker logs -f $(docker ps -aq --filter name="mobymask-1")
+```
+
+Check the reverse payment proxy container logs:
+
+```bash
+docker logs -f $(docker ps -aq --filter name="nitro-reverse-payment-proxy")
+```
+
+Run the ponder app:
+
+```bash
+docker exec -it payments-ponder-app-1 bash -c "pnpm start"
+```
+
+## Clean up
+
+Stop all the services running in the background:
+
+```bash
+laconic-so --stack fixturenet-payments deploy --cluster payments down 30
+```
+
+Clear volumes created by this stack:
+
+```bash
+# List all relevant volumes
+docker volume ls -q --filter "name=payments"
+
+# Remove all the listed volumes
+docker volume rm $(docker volume ls -q --filter "name=payments")
+```
diff --git a/app/data/stacks/fixturenet-payments/stack.yml b/app/data/stacks/fixturenet-payments/stack.yml
new file mode 100644
index 00000000..98b26928
--- /dev/null
+++ b/app/data/stacks/fixturenet-payments/stack.yml
@@ -0,0 +1,53 @@
+version: "1.0"
+name: fixturenet-payments
+description: "Stack to demonstrate payments between various services"
+repos:
+ # fixturenet repos
+ - github.com/cerc-io/go-ethereum
+ - github.com/cerc-io/lighthouse
+ - github.com/cerc-io/ipld-eth-db
+ - github.com/cerc-io/ipld-eth-server
+ # nitro repos
+ - github.com/cerc-io/ts-nitro@v0.1.12
+ - github.com/cerc-io/go-nitro@v0.1.0-ts-port-0.1.4 # TODO: Update after fixes
+ # mobymask watcher repos
+ - github.com/cerc-io/watcher-ts@v0.2.61
+ - github.com/cerc-io/mobymask-v2-watcher-ts@v3 # TODO: Update after fixes
+ - github.com/cerc-io/MobyMask@v0.1.3
+ # mobymask app repos
+ - github.com/cerc-io/mobymask-snap
+ - github.com/cerc-io/mobymask-ui@v0.2.0
+ # ponder repo
+ - github.com/cerc-io/ponder@laconic
+containers:
+ # fixturenet images
+ - cerc/go-ethereum
+ - cerc/lighthouse
+ - cerc/lighthouse-cli
+ - cerc/fixturenet-eth-genesis
+ - cerc/fixturenet-eth-geth
+ - cerc/fixturenet-eth-lighthouse
+ - cerc/ipld-eth-db
+ - cerc/ipld-eth-server
+ - cerc/nitro-contracts
+ - cerc/go-nitro
+ # mobymask watcher images
+ - cerc/watcher-ts
+ - cerc/watcher-mobymask-v3
+ - cerc/mobymask
+ # mobymask app images
+ - cerc/mobymask-snap
+ - cerc/mobymask-ui
+ # ponder image
+ - cerc/ponder
+pods:
+ - fixturenet-eth
+ - ipld-eth-server
+ - ipld-eth-db
+ - nitro-contracts
+ - go-nitro
+ - nitro-reverse-payment-proxy
+ - watcher-mobymask-v3
+ - mobymask-snap
+ - mobymask-app-v3
+ - ponder
diff --git a/app/data/stacks/fixturenet-plugeth-tx/README.md b/app/data/stacks/fixturenet-plugeth-tx/README.md
index 832ed749..50bfd919 100644
--- a/app/data/stacks/fixturenet-plugeth-tx/README.md
+++ b/app/data/stacks/fixturenet-plugeth-tx/README.md
@@ -12,6 +12,7 @@ See `stacks/fixturenet-eth/README.md` for more information.
* cerc/tx-spammer
## Deploy the stack
+Note: since some Go dependencies are currently private, `CERC_GO_AUTH_TOKEN` must be set to a valid Gitea access token before running the `build-containers` command.
```
$ laconic-so --stack fixturenet-plugeth-tx setup-repositories
$ laconic-so --stack fixturenet-plugeth-tx build-containers
diff --git a/app/data/stacks/fixturenet-plugeth-tx/stack.yml b/app/data/stacks/fixturenet-plugeth-tx/stack.yml
index 7e91ce93..a8db1065 100644
--- a/app/data/stacks/fixturenet-plugeth-tx/stack.yml
+++ b/app/data/stacks/fixturenet-plugeth-tx/stack.yml
@@ -1,6 +1,6 @@
version: "1.2"
name: fixturenet-plugeth-tx
-decription: "plugeth Ethereum Fixturenet w/ tx-spammer"
+description: "plugeth Ethereum Fixturenet w/ tx-spammer"
repos:
- git.vdb.to/cerc-io/plugeth@statediff
- git.vdb.to/cerc-io/plugeth-statediff
diff --git a/app/data/stacks/fixturenet-sushiswap-subgraph/README.md b/app/data/stacks/fixturenet-sushiswap-subgraph/README.md
new file mode 100644
index 00000000..b1151271
--- /dev/null
+++ b/app/data/stacks/fixturenet-sushiswap-subgraph/README.md
@@ -0,0 +1,204 @@
+# Fixturenet SushiSwap Subgraph
+
+## Setup
+
+Clone required repositories:
+
+```bash
+laconic-so --stack fixturenet-sushiswap-subgraph setup-repositories --pull
+```
+
+Check out a non-default branch in the cloned repos if required:
+
+```bash
+# Default repo base dir
+cd ~/cerc
+
+# Example
+cd graph-node
+git checkout && git pull
+
+# Remove the corresponding docker image if it already exists
+docker image rm cerc/graph-node:local
+# Remove any dangling images
+docker image prune
+```
+
+Build the container images:
+
+```bash
+laconic-so --stack fixturenet-sushiswap-subgraph build-containers
+```
+
+## Deploy
+
+Deploy the stack:
+
+```bash
+laconic-so --stack fixturenet-sushiswap-subgraph deploy --cluster sushigraph up
+
+# Note: Remove any existing volumes for the cluster for a fresh start
+```
+
+After all services have started:
+
+* Follow `graph-node` logs:
+
+ ```bash
+ laconic-so --stack fixturenet-sushiswap-subgraph deploy --cluster sushigraph logs -f graph-node
+ ```
+
+* Check that the subgraphs have been deployed:
+
+ ```bash
+ laconic-so --stack fixturenet-sushiswap-subgraph deploy --cluster sushigraph logs -f sushiswap-subgraph-v3
+
+ # Expected output:
+ # .
+ # .
+ # sushigraph-sushiswap-subgraph-v3-1 | - Deploying to Graph node http://graph-node:8020/
+ # sushigraph-sushiswap-subgraph-v3-1 | Deployed to http://graph-node:8000/subgraphs/name/sushiswap/v3-lotus/graphql
+ # sushigraph-sushiswap-subgraph-v3-1 |
+ # sushigraph-sushiswap-subgraph-v3-1 |
+ # sushigraph-sushiswap-subgraph-v3-1 | Subgraph endpoints:
+ # sushigraph-sushiswap-subgraph-v3-1 | Queries (HTTP): http://graph-node:8000/subgraphs/name/sushiswap/v3-lotus
+ # .
+ # .
+ # sushigraph-sushiswap-subgraph-v3-1 | - Deploying to Graph node http://graph-node:8020/
+ # sushigraph-sushiswap-subgraph-v3-1 | Deployed to http://graph-node:8000/subgraphs/name/sushiswap/blocks/graphql
+ # sushigraph-sushiswap-subgraph-v3-1 |
+ # sushigraph-sushiswap-subgraph-v3-1 |
+ # sushigraph-sushiswap-subgraph-v3-1 | Subgraph endpoints:
+ # sushigraph-sushiswap-subgraph-v3-1 | Queries (HTTP): http://graph-node:8000/subgraphs/name/sushiswap/blocks
+ # sushigraph-sushiswap-subgraph-v3-1 |
+ # sushigraph-sushiswap-subgraph-v3-1 |
+ # sushigraph-sushiswap-subgraph-v3-1 | Done
+ ```
+
+After `graph-node` has fetched the latest blocks from upstream, use the subgraph (GQL) endpoints for querying:
+
+```bash
+# Find out the mapped host port for the subgraph endpoint
+laconic-so --stack fixturenet-sushiswap-subgraph deploy --cluster sushigraph port graph-node 8000
+# 0.0.0.0:HOST_PORT
+
+# Blocks subgraph endpoint:
+http://127.0.0.1:/subgraphs/name/sushiswap/blocks/graphql
+
+# v3 subgraph endpoint:
+http://127.0.0.1:/subgraphs/name/sushiswap/v3-lotus/graphql
+```
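+
+As a quick check, a query can be POSTed to a subgraph endpoint with `curl`; a minimal sketch, assuming `HOST_PORT` is the mapped host port found above (graph-node serves queries at the endpoint path without the trailing `/graphql`):
+
+```bash
+curl -X POST -H "Content-Type: application/json" \
+  --data '{"query": "{ _meta { block { number } } }"}' \
+  http://127.0.0.1:HOST_PORT/subgraphs/name/sushiswap/blocks
+```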
+
+## Run
+
+* Deploy an ERC20 token:
+
+ ```bash
+ docker exec -it sushigraph-sushiswap-v3-periphery-1 yarn hardhat --network docker deploy --tags TestERC20
+
+ # Deploy two tokens and set the addresses to variables TOKEN1_ADDRESS and TOKEN2_ADDRESS
+ export TOKEN1_ADDRESS=
+ export TOKEN2_ADDRESS=
+ ```
+
+* Get the address of the deployed factory contract:
+
+ ```bash
+ docker exec -it sushigraph-sushiswap-v3-core-1 jq -r '.address' /app/deployments/docker/UniswapV3Factory.json
+
+ # Set the address to variable FACTORY_ADDRESS
+ export FACTORY_ADDRESS=
+ ```
+
+* Create a pool:
+
+ ```bash
+ docker exec -it sushigraph-sushiswap-v3-core-1 pnpm run pool:create:docker --factory $FACTORY_ADDRESS --token0 $TOKEN1_ADDRESS --token1 $TOKEN2_ADDRESS --fee 500
+
+ # Set the created pool address to variable POOL_ADDRESS
+ export POOL_ADDRESS=
+ ```
+
+* Initialize the pool:
+
+ ```bash
+ docker exec -it sushigraph-sushiswap-v3-core-1 pnpm run pool:initialize:docker --sqrt-price 4295128939 --pool $POOL_ADDRESS
+ ```
+
+* Set the recipient address to the contract deployer:
+
+ ```bash
+ export RECIPIENT=0xD375B03bd3A2434A9f675bEC4Ccd68aC5e67C743
+ ```
+
+* Trigger pool `Mint` event:
+
+ ```bash
+ docker exec -it sushigraph-sushiswap-v3-core-1 pnpm run pool:mint:docker --pool $POOL_ADDRESS --recipient $RECIPIENT --amount 10
+ ```
+
+* Trigger pool `Burn` event:
+
+ ```bash
+ docker exec -it sushigraph-sushiswap-v3-core-1 pnpm run pool:burn:docker --pool $POOL_ADDRESS --amount 10
+ ```
+
+* Query the sushiswap v3-lotus subgraph GQL endpoint after running the above commands:
+
+ ```graphql
+ {
+ _meta {
+ block {
+ number
+ }
+ deployment
+ hasIndexingErrors
+ }
+
+ factories {
+ poolCount
+ id
+ }
+
+ pools {
+ id
+ token0 {
+ id
+ name
+ symbol
+ }
+ mints {
+ id
+ owner
+ }
+ burns {
+ id
+ owner
+ }
+ }
+ }
+ ```
+
+## Clean up
+
+To stop all the services running in the background, run:
+
+```bash
+laconic-so --stack fixturenet-sushiswap-subgraph deploy --cluster sushigraph down
+```
+
+Clear volumes created by this stack:
+
+```bash
+# List all relevant volumes
+docker volume ls -q --filter "name=sushigraph"
+
+# Remove all the listed volumes
+docker volume rm $(docker volume ls -q --filter "name=sushigraph")
+
+# WARNING: To avoid refetching the Lotus proof params on the next run,
+# avoid removing the corresponding volumes
+
+# To remove volumes that do not contain Lotus params
+docker volume rm $(docker volume ls -q --filter "name=sushigraph" | grep -v "params$")
+```
diff --git a/app/data/stacks/fixturenet-sushiswap-subgraph/stack.yml b/app/data/stacks/fixturenet-sushiswap-subgraph/stack.yml
new file mode 100644
index 00000000..3e23421d
--- /dev/null
+++ b/app/data/stacks/fixturenet-sushiswap-subgraph/stack.yml
@@ -0,0 +1,28 @@
+version: "1.0"
+name: fixturenet-sushiswap-subgraph
+description: "An end-to-end SushiSwap Subgraph stack"
+repos:
+ # fixturenet-lotus repo
+ - github.com/filecoin-project/lotus
+ # graph-node repo
+ - github.com/graphprotocol/graph-node
+ # sushiswap repos
+ - github.com/cerc-io/sushiswap-v3-core@watcher-ts
+ - github.com/cerc-io/sushiswap-v3-periphery@watcher-ts
+ # sushiswap subgraph repo
+ - github.com/sushiswap/subgraphs
+containers:
+ # fixturenet-lotus image
+ - cerc/lotus
+ # graph-node image
+ - cerc/graph-node
+ # sushiswap contract deployment images
+ - cerc/sushiswap-v3-core
+ - cerc/sushiswap-v3-periphery
+ # sushiswap subgraphs image
+ - cerc/sushiswap-subgraphs
+pods:
+ - fixturenet-lotus
+ - graph-node
+ - contract-sushiswap
+ - fixturenet-sushiswap-subgraph-v3
diff --git a/app/data/stacks/graph-node/README.md b/app/data/stacks/graph-node/README.md
new file mode 100644
index 00000000..0527efc0
--- /dev/null
+++ b/app/data/stacks/graph-node/README.md
@@ -0,0 +1,157 @@
+# Graph Node
+
+## Setup
+
+Clone required repositories:
+
+```bash
+laconic-so --stack graph-node setup-repositories --pull
+```
+
+Check out a non-default branch in the cloned repos if required:
+
+```bash
+# Default repo base dir
+cd ~/cerc
+
+# Example
+cd graph-node
+git checkout && git pull
+
+# Remove the corresponding docker image if it already exists
+docker image rm cerc/graph-node:local
+# Remove any dangling images
+docker image prune
+```
+
+Build the container images:
+
+```bash
+laconic-so --stack graph-node build-containers
+```
+
+## Create a deployment
+
+Initialize deployment and create "spec" file:
+
+```bash
+laconic-so --stack graph-node deploy init --output graph-node-spec.yml
+```
+
+We need to assign fixed ports: `8000` for the subgraph GQL endpoint, `8020` for subgraph deployment, and `5001` for IPFS. The values can be
+customized by editing the "spec" file generated by `laconic-so deploy init`.
+```
+$ cat graph-node-spec.yml
+stack: graph-node
+ports:
+ graph-node:
+ - '8000:8000'
+ - '8001'
+ - '8020:8020'
+ - '8030'
+ ipfs:
+ - '8080'
+ - '4001'
+ - '5001:5001'
+...
+```
+
+Create deployment:
+
+```bash
+laconic-so deploy create --spec-file graph-node-spec.yml --deployment-dir graph-node-deployment
+```
+
+## Start the stack
+
+Create an env file with the following values to be set before starting the stack:
+
+```bash
+# Set ETH RPC endpoint the graph node will use
+
+# Host and port of the ETH RPC endpoint to check before starting graph-node
+export ETH_RPC_HOST=
+export ETH_RPC_PORT=
+
+# The Ethereum network(s) graph-node will connect to
+# Set this to a space-separated list of the networks where each entry has the form NAME:URL
+export ETH_NETWORKS=
+```
+
+Example env file:
+
+```bash
+export ETH_RPC_HOST=filecoin.chainup.net
+export ETH_RPC_PORT=443
+
+export ETH_NETWORKS=filecoin:https://filecoin.chainup.net/rpc/v1
+```
+
+Set the environment variables:
+
+```bash
+source
+```
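+
+For example, if the env file above was saved as `graph-node.env` (a filename chosen here for illustration):
+
+```bash
+source graph-node.env
+```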
+
+Deploy the stack:
+
+```bash
+laconic-so deployment --dir graph-node-deployment start
+
+# Note: Remove any existing volumes in the cluster for a fresh start
+```
+
+After all services have started, follow `graph-node` logs:
+
+```bash
+laconic-so deployment --dir graph-node-deployment logs -f graph-node
+```
+
+Subgraphs can now be deployed to the graph-node.
+Follow the [Deploy the Subgraph](https://github.com/graphprotocol/graph-node/blob/v0.32.0/docs/getting-started.md#24-deploy-the-subgraph) section in the graph-node docs to deploy an existing subgraph.
+
+## Set environment variables
+
+* The graph-node environment variable `ETHEREUM_REORG_THRESHOLD` can be set in the deployment compose file
+ ```bash
+ $ cat graph-node-deployment/compose/docker-compose-graph-node.yml
+ services:
+ graph-node:
+ image: cerc/graph-node:local
+ ...
+ environment:
+ ...
+ GRAPH_LOG: debug
+ ETHEREUM_REORG_THRESHOLD: 3
+ ```
+  Change `ETHEREUM_REORG_THRESHOLD` to the desired value
+
+ * To restart graph-node with updated values
+ * Stop the stack first
+ ```bash
+ laconic-so deployment --dir graph-node-deployment stop
+ ```
+ * Start the stack again
+      ```bash
+ laconic-so deployment --dir graph-node-deployment start
+ ```
+  * To check whether the environment variable has been updated in the graph-node container:
+ ```bash
+ $ laconic-so deployment --dir graph-node-deployment exec graph-node bash
+ root@dc4d3abe1615:/# echo $ETHEREUM_REORG_THRESHOLD
+ 16
+ ```
+
+## Clean up
+
+To stop all the services running in the background, run:
+
+```bash
+laconic-so deployment --dir graph-node-deployment stop
+```
+
+Clear volumes created by this stack:
+
+```bash
+laconic-so deployment --dir graph-node-deployment stop --delete-volumes
+```
diff --git a/app/data/stacks/graph-node/deploy-subgraph.md b/app/data/stacks/graph-node/deploy-subgraph.md
new file mode 100644
index 00000000..73e200b2
--- /dev/null
+++ b/app/data/stacks/graph-node/deploy-subgraph.md
@@ -0,0 +1,68 @@
+# Deploying Subgraph
+
+## Setup
+
+We will use the [ethereum-gravatar](https://github.com/graphprotocol/graph-tooling/tree/%40graphprotocol/graph-cli%400.58.0/examples/ethereum-gravatar) example subgraph from the `graphprotocol/graph-tooling` repo.
+
+- Clone the repo
+ ```bash
+ git clone git@github.com:graphprotocol/graph-tooling.git
+
+ cd graph-tooling
+ ```
+
+- Install dependencies
+ ```bash
+ pnpm install
+ ```
+
+- Change directory to example-subgraph
+ ```bash
+ cd examples/ethereum-gravatar
+ ```
+
+## Deploy
+
+The following steps should be similar for every subgraph.
+
+- Change the network and address in `subgraph.yaml`
+ ```yaml
+ ...
+ dataSources:
+ - kind: ethereum/contract
+ name: Gravity
+ network:
+ source:
+ address: ''
+ abi: Gravity
+ startBlock:
+ ...
+ ```
+ - `CONTRACT_ADDRESS` is the address of the deployed contract on the desired network
+ - `START_BLOCK` is the block number after which we want to process events
+ - `NETWORK_NAME` is the name of the network specified when deploying graph-node
+    - When deploying graph-node, the `ETH_NETWORKS` env var is set to a space-separated list of networks, where each entry has the form `NAME:URL`
+ - The `NAME` can be used in subgraph to specify which network to use
+    - More details can be seen in the [Start the stack](./README.md#start-the-stack) section
+
+- Build the subgraph
+ ```bash
+ pnpm codegen
+ pnpm build
+ ```
+
+- Create and deploy the subgraph
+ ```bash
+ pnpm graph create example --node
+
+ pnpm graph deploy example --ipfs --node
+ ```
+ - `GRAPH_NODE_DEPLOY_ENDPOINT` and `GRAPH_NODE_IPFS_ENDPOINT` will be available after graph-node has been deployed
+  - More details can be seen in the [Create a deployment](./README.md#create-a-deployment) section (see also the worked sketch at the end of this document)
+
+- The subgraph GQL endpoint will be shown after the deploy command runs successfully
+
+- To remove the subgraph
+ ```bash
+ pnpm graph remove --node example
+ ```
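+
+As a worked sketch, assuming graph-node was deployed with the fixed ports suggested in the [README](./README.md) (`8020` for subgraph deployment, `5001` for IPFS), the create and deploy commands might look like:
+
+```bash
+pnpm graph create example --node http://127.0.0.1:8020/
+pnpm graph deploy example --ipfs http://127.0.0.1:5001 --node http://127.0.0.1:8020/
+```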
diff --git a/app/data/stacks/graph-node/stack.yml b/app/data/stacks/graph-node/stack.yml
new file mode 100644
index 00000000..ce45e965
--- /dev/null
+++ b/app/data/stacks/graph-node/stack.yml
@@ -0,0 +1,9 @@
+version: "1.0"
+name: graph-node
+description: "Stack for running graph-node"
+repos:
+ - github.com/graphprotocol/graph-node
+containers:
+ - cerc/graph-node
+pods:
+ - graph-node
diff --git a/app/data/stacks/mainnet-eth/README.md b/app/data/stacks/mainnet-eth/README.md
index 23c8d325..8f0dc1c4 100644
--- a/app/data/stacks/mainnet-eth/README.md
+++ b/app/data/stacks/mainnet-eth/README.md
@@ -17,7 +17,7 @@ $ laconic-so --stack mainnet-eth build-containers
## Create a deployment
```
-$ laconic-so --stack mainnet-eth deploy init --output mainnet-eth-spec.yml
+$ laconic-so --stack mainnet-eth deploy init --map-ports-to-host any-same --output mainnet-eth-spec.yml
$ laconic-so deploy create --spec-file mainnet-eth-spec.yml --deployment-dir mainnet-eth-deployment
```
## Start the stack
@@ -73,7 +73,7 @@ After deleting the volumes, any subsequent re-start will begin chain sync from c
## Ports
It is usually necessary to expose certain container ports on one or more of the host's addresses to allow incoming connections.
-Any ports defined in the Docker compose file are exposed by default with random port assignments, but the values can be
+Any ports defined in the Docker compose file are exposed by default with random port assignments, bound to "any" interface (IP address 0.0.0.0), but the port mappings can be
customized by editing the "spec" file generated by `laconic-so deploy init`.
In this example, ports `8545` and `5052` have been assigned to a specific addresses/port combination on the host, while
@@ -92,7 +92,15 @@ volumes:
mainnet_eth_geth_1_data: ./data/mainnet_eth_geth_1_data
mainnet_eth_lighthouse_1_data: ./data/mainnet_eth_lighthouse_1_data
```
-
+In addition, a stack-wide port mapping "recipe" can be applied at the time the
+`laconic-so deploy init` command is run, by supplying the desired recipe with the `--map-ports-to-host` option. The following recipes are supported:
+
+| Recipe | Host Port Mapping |
+|--------|-------------------|
+| any-variable-random | Bind to 0.0.0.0 using a random port assigned at start time (default) |
+| localhost-same | Bind to 127.0.0.1 using the same port number as exposed by the containers |
+| any-same | Bind to 0.0.0.0 using the same port number as exposed by the containers |
+| localhost-fixed-random | Bind to 127.0.0.1 using a random port number selected at the time the command is run (not checked for availability) |
+| any-fixed-random | Bind to 0.0.0.0 using a random port number selected at the time the command is run (not checked for availability) |
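+
+For example, to bind all ports to localhost using the same numbers as the container ports:
+```
+$ laconic-so --stack mainnet-eth deploy init --map-ports-to-host localhost-same --output mainnet-eth-spec.yml
+```
+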
## Data volumes
Container data volumes are bind-mounted to specified paths in the host filesystem.
The default setup (generated by `laconic-so deploy init`) places the volumes in the `./data` subdirectory of the deployment directory:
@@ -130,4 +138,4 @@ $ sudo du -h mainnet-eth-deployment/data/
860G mainnet-eth-deployment/data/mainnet_eth_geth_1_data/geth
860G mainnet-eth-deployment/data/mainnet_eth_geth_1_data
885G mainnet-eth-deployment/data/
-```
\ No newline at end of file
+```
diff --git a/app/data/stacks/mainnet-eth/deploy/commands.py b/app/data/stacks/mainnet-eth/deploy/commands.py
index 09586aac..9fcecbcf 100644
--- a/app/data/stacks/mainnet-eth/deploy/commands.py
+++ b/app/data/stacks/mainnet-eth/deploy/commands.py
@@ -1,4 +1,4 @@
-# Copyright © 2023 Cerc
+# Copyright © 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -15,6 +15,7 @@
from secrets import token_hex
+
def init(ctx):
return None
@@ -23,7 +24,7 @@ def setup(ctx):
return None
-def create(ctx):
+def create(ctx, extra_args):
# Generate the JWT secret and save to its config file
secret = token_hex(32)
jwt_file_path = ctx.deployment_dir.joinpath("data", "mainnet_eth_config_data", "jwtsecret")
diff --git a/app/data/stacks/mainnet-eth/stack.yml b/app/data/stacks/mainnet-eth/stack.yml
index 5051eb9a..eca59ecc 100644
--- a/app/data/stacks/mainnet-eth/stack.yml
+++ b/app/data/stacks/mainnet-eth/stack.yml
@@ -1,15 +1,23 @@
-version: "1.1"
+version: "1.2"
name: mainnet-eth
-decription: "Ethereum Mainnet"
+description: "Ethereum Mainnet"
repos:
- github.com/cerc-io/go-ethereum
- github.com/cerc-io/lighthouse
- github.com/dboreham/foundry
+ - git.vdb.to/cerc-io/keycloak-reg-api
+ - git.vdb.to/cerc-io/keycloak-reg-ui
containers:
- cerc/go-ethereum
- cerc/lighthouse
- cerc/lighthouse-cli
- cerc/foundry
+ - cerc/keycloak
+ - cerc/webapp-base
+ - cerc/keycloak-reg-api
+ - cerc/keycloak-reg-ui
pods:
- mainnet-eth
+ - mainnet-eth-keycloak
+ - mainnet-eth-metrics
- foundry
diff --git a/app/data/stacks/mainnet-go-opera/stack.yml b/app/data/stacks/mainnet-go-opera/stack.yml
index 80815cdf..2597658e 100644
--- a/app/data/stacks/mainnet-go-opera/stack.yml
+++ b/app/data/stacks/mainnet-go-opera/stack.yml
@@ -1,6 +1,6 @@
version: "1.1"
name: mainnet-opera
-decription: "Fantom mainnet node"
+description: "Fantom mainnet node"
repos:
- github.com/Fantom-foundation/go-opera@release/1.1.2-rc.5
containers:
diff --git a/app/data/stacks/mainnet-laconic/deploy/commands.py b/app/data/stacks/mainnet-laconic/deploy/commands.py
index 0d4f5b8b..16bf015a 100644
--- a/app/data/stacks/mainnet-laconic/deploy/commands.py
+++ b/app/data/stacks/mainnet-laconic/deploy/commands.py
@@ -1,4 +1,4 @@
-# Copyright © 2022, 2023 Cerc
+# Copyright © 2022, 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -14,30 +14,293 @@
# along with this program. If not, see .
from app.util import get_yaml
-from app.deploy_types import DeployCommandContext, DeploymentContext
+from app.deploy_types import DeployCommandContext, LaconicStackSetupCommand, DeploymentContext
from app.stack_state import State
from app.deploy_util import VolumeMapping, run_container_command
+from app.command_types import CommandOptions
+from enum import Enum
+from pathlib import Path
+from shutil import copyfile, copytree
+import json
+import os
+import sys
+import tomli
+import re
default_spec_file_content = """config:
node_moniker: my-node-name
chain_id: my-chain-id
"""
-init_help_text = """Add helpful text here on setting config variables.
-"""
+
+class SetupPhase(Enum):
+ INITIALIZE = 1
+ JOIN = 2
+ CREATE = 3
+    ILLEGAL = 4
-def setup(command_context: DeployCommandContext):
- node_moniker = "dbdb-node"
- chain_id = "laconic_81337-1"
+def _client_toml_path(network_dir: Path):
+ return network_dir.joinpath("config", "client.toml")
+
+
+def _config_toml_path(network_dir: Path):
+ return network_dir.joinpath("config", "config.toml")
+
+
+def _get_chain_id_from_config(network_dir: Path):
+ chain_id = None
+ with open(_client_toml_path(network_dir), "rb") as f:
+ toml_dict = tomli.load(f)
+ chain_id = toml_dict["chain-id"]
+ return chain_id
+
+
+def _get_node_moniker_from_config(network_dir: Path):
+ moniker = None
+ with open(_client_toml_path(network_dir), "rb") as f:
+ toml_dict = tomli.load(f)
+ moniker = toml_dict["moniker"]
+ return moniker
+
+
+def _get_node_key_from_gentx(options: CommandOptions, gentx_file_name: str):
+ gentx_file_path = Path(gentx_file_name)
+ if gentx_file_path.exists():
+ with open(Path(gentx_file_name), "rb") as f:
+ parsed_json = json.load(f)
+ return parsed_json['body']['messages'][0]['delegator_address']
+ else:
+ print(f"Error: gentx file: {gentx_file_name} does not exist")
+ sys.exit(1)
+
+
+def _comma_delimited_to_list(list_str: str):
+ return list_str.split(",") if list_str else []
+
+
+def _get_node_keys_from_gentx_files(options: CommandOptions, gentx_file_list: str):
+ node_keys = []
+ gentx_files = _comma_delimited_to_list(gentx_file_list)
+ for gentx_file in gentx_files:
+ node_key = _get_node_key_from_gentx(options, gentx_file)
+ if node_key:
+ node_keys.append(node_key)
+ return node_keys
+
+
+def _copy_gentx_files(options: CommandOptions, network_dir: Path, gentx_file_list: str):
+ gentx_files = _comma_delimited_to_list(gentx_file_list)
+ for gentx_file in gentx_files:
+ gentx_file_path = Path(gentx_file)
+ copyfile(gentx_file_path, os.path.join(network_dir, "config", "gentx", os.path.basename(gentx_file_path)))
+
+
+def _remove_persistent_peers(options: CommandOptions, network_dir: Path):
+ config_file_path = _config_toml_path(network_dir)
+ if not config_file_path.exists():
+ print("Error: config.toml not found")
+ sys.exit(1)
+ with open(config_file_path, "r") as input_file:
+ config_file_content = input_file.read()
+ persistent_peers_pattern = '^persistent_peers = "(.+?)"'
+ replace_with = "persistent_peers = \"\""
+ config_file_content = re.sub(persistent_peers_pattern, replace_with, config_file_content, flags=re.MULTILINE)
+ with open(config_file_path, "w") as output_file:
+ output_file.write(config_file_content)
+
+
+def _insert_persistent_peers(options: CommandOptions, config_dir: Path, new_persistent_peers: str):
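+    # new_persistent_peers is expected to be a comma-separated list of peers, each of the form <node-id>@<host>:<port>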
+ config_file_path = config_dir.joinpath("config.toml")
+ if not config_file_path.exists():
+ print("Error: config.toml not found")
+ sys.exit(1)
+ with open(config_file_path, "r") as input_file:
+ config_file_content = input_file.read()
+ persistent_peers_pattern = '^persistent_peers = ""'
+ replace_with = f"persistent_peers = \"{new_persistent_peers}\""
+ config_file_content = re.sub(persistent_peers_pattern, replace_with, config_file_content, flags=re.MULTILINE)
+ with open(config_file_path, "w") as output_file:
+ output_file.write(config_file_content)
+
+
+def _phase_from_params(parameters):
+ phase = SetupPhase.ILLEGAL
+ if parameters.initialize_network:
+ if parameters.join_network or parameters.create_network:
+ print("Can't supply --join-network or --create-network with --initialize-network")
+ sys.exit(1)
+ if not parameters.chain_id:
+ print("--chain-id is required")
+ sys.exit(1)
+ # node_moniker must be supplied
+ if not parameters.node_moniker:
+ print("Error: --node-moniker is required")
+ sys.exit(1)
+ phase = SetupPhase.INITIALIZE
+ elif parameters.join_network:
+ if parameters.initialize_network or parameters.create_network:
+ print("Can't supply --initialize-network or --create-network with --join-network")
+ sys.exit(1)
+ phase = SetupPhase.JOIN
+ elif parameters.create_network:
+ if parameters.initialize_network or parameters.join_network:
+ print("Can't supply --initialize-network or --join-network with --create-network")
+ sys.exit(1)
+ phase = SetupPhase.CREATE
+ return phase
+
+
+def setup(command_context: DeployCommandContext, parameters: LaconicStackSetupCommand, extra_args):
+
+ options = command_context.cluster_context.options
+
+ currency = "stake" # Does this need to be a parameter?
+
+ if options.debug:
+ print(f"parameters: {parameters}")
+
+ phase = _phase_from_params(parameters)
+
+ network_dir = Path(parameters.network_dir).absolute()
+ laconicd_home_path_in_container = "/laconicd-home"
mounts = [
- VolumeMapping("./path", "~/.laconicd")
+ VolumeMapping(network_dir, laconicd_home_path_in_container)
]
- output, status = run_container_command(command_context.cluster_context, "laconicd", f"laconicd init {node_moniker} --chain-id {chain_id}", mounts)
+
+ if phase == SetupPhase.INITIALIZE:
+
+ # We want to create the directory so if it exists that's an error
+ if os.path.exists(network_dir):
+ print(f"Error: network directory {network_dir} already exists")
+ sys.exit(1)
+
+ os.mkdir(network_dir)
+
+ output, status = run_container_command(
+ command_context,
+ "laconicd", f"laconicd init {parameters.node_moniker} --home {laconicd_home_path_in_container}\
+ --chain-id {parameters.chain_id}", mounts)
+ if options.debug:
+ print(f"Command output: {output}")
+
+ elif phase == SetupPhase.JOIN:
+ if not os.path.exists(network_dir):
+ print(f"Error: network directory {network_dir} doesn't exist")
+ sys.exit(1)
+ # Get the chain_id from the config file created in the INITIALIZE phase
+ chain_id = _get_chain_id_from_config(network_dir)
+
+ output1, status1 = run_container_command(
+ command_context, "laconicd", f"laconicd keys add {parameters.key_name} --home {laconicd_home_path_in_container}\
+ --keyring-backend test", mounts)
+ if options.debug:
+ print(f"Command output: {output1}")
+ output2, status2 = run_container_command(
+ command_context,
+ "laconicd",
+ f"laconicd add-genesis-account {parameters.key_name} 12900000000000000000000{currency}\
+ --home {laconicd_home_path_in_container} --keyring-backend test",
+ mounts)
+ if options.debug:
+ print(f"Command output: {output2}")
+ output3, status3 = run_container_command(
+ command_context,
+ "laconicd",
+ f"laconicd gentx {parameters.key_name} 90000000000{currency} --home {laconicd_home_path_in_container}\
+ --chain-id {chain_id} --keyring-backend test",
+ mounts)
+ if options.debug:
+ print(f"Command output: {output3}")
+ output4, status4 = run_container_command(
+ command_context,
+ "laconicd",
+ f"laconicd keys show {parameters.key_name} -a --home {laconicd_home_path_in_container} --keyring-backend test",
+ mounts)
+ print(f"Node validator address: {output4}")
+
+ elif phase == SetupPhase.CREATE:
+ if not os.path.exists(network_dir):
+ print(f"Error: network directory {network_dir} doesn't exist")
+ sys.exit(1)
+
+ # In the CREATE phase, we are either a "coordinator" node, generating the genesis.json file ourselves
+ # OR we are a "not-coordinator" node, consuming a genesis file we got from the coordinator node.
+ if parameters.genesis_file:
+ # We got the genesis file from elsewhere
+ # Copy it into our network dir
+ genesis_file_path = Path(parameters.genesis_file)
+ if not os.path.exists(genesis_file_path):
+ print(f"Error: supplied genesis file: {parameters.genesis_file} does not exist.")
+ sys.exit(1)
+ copyfile(genesis_file_path, os.path.join(network_dir, "config", os.path.basename(genesis_file_path)))
+ else:
+ # We're generating the genesis file
+ if not parameters.gentx_file_list:
+ print("Error: --gentx-files must be supplied")
+ sys.exit(1)
+ # First look in the supplied gentx files for the other nodes' keys
+ other_node_keys = _get_node_keys_from_gentx_files(options, parameters.gentx_file_list)
+ # Add those keys to our genesis, with balances we determine here (why?)
+ for other_node_key in other_node_keys:
+ outputk, statusk = run_container_command(
+ command_context, "laconicd", f"laconicd add-genesis-account {other_node_key} 12900000000000000000000{currency}\
+ --home {laconicd_home_path_in_container} --keyring-backend test", mounts)
+ if options.debug:
+ print(f"Command output: {outputk}")
+ # Copy the gentx json files into our network dir
+ _copy_gentx_files(options, network_dir, parameters.gentx_file_list)
+ # Now we can run collect-gentxs
+ output1, status1 = run_container_command(
+ command_context, "laconicd", f"laconicd collect-gentxs --home {laconicd_home_path_in_container}", mounts)
+ if options.debug:
+ print(f"Command output: {output1}")
+ print(f"Generated genesis file, please copy to other nodes as required: \
+ {os.path.join(network_dir, 'config', 'genesis.json')}")
+ # Last thing, collect-gentxs puts a likely bogus set of persistent_peers in config.toml so we remove that now
+ _remove_persistent_peers(options, network_dir)
+ # In both cases we validate the genesis file now
+        output2, status2 = run_container_command(
+ command_context, "laconicd", f"laconicd validate-genesis --home {laconicd_home_path_in_container}", mounts)
+ print(f"validate-genesis result: {output2}")
+
+ else:
+ print("Illegal parameters supplied")
+ sys.exit(1)
+
+
+def create(context: DeploymentContext, extra_args):
+ network_dir = extra_args[0]
+ if network_dir is None:
+ print("Error: --network-dir must be supplied")
+ sys.exit(1)
+ network_dir_path = Path(network_dir)
+ if not (network_dir_path.exists() and network_dir_path.is_dir()):
+ print(f"Error: supplied network directory does not exist: {network_dir}")
+ sys.exit(1)
+ config_dir_path = network_dir_path.joinpath("config")
+ if not (config_dir_path.exists() and config_dir_path.is_dir()):
+ print(f"Error: supplied network directory does not contain a config directory: {config_dir_path}")
+ sys.exit(1)
+ data_dir_path = network_dir_path.joinpath("data")
+ if not (data_dir_path.exists() and data_dir_path.is_dir()):
+ print(f"Error: supplied network directory does not contain a data directory: {data_dir_path}")
+ sys.exit(1)
+ # Copy the network directory contents into our deployment
+ # TODO: change this to work with non local paths
+ deployment_config_dir = context.deployment_dir.joinpath("data", "laconicd-config")
+ copytree(config_dir_path, deployment_config_dir, dirs_exist_ok=True)
+ # If supplied, add the initial persistent peers to the config file
+ if extra_args[1]:
+ initial_persistent_peers = extra_args[1]
+ _insert_persistent_peers(context.command_context.cluster_context.options, deployment_config_dir, initial_persistent_peers)
+ # Copy the data directory contents into our deployment
+ # TODO: change this to work with non local paths
+ deployment_data_dir = context.deployment_dir.joinpath("data", "laconicd-data")
+ copytree(data_dir_path, deployment_data_dir, dirs_exist_ok=True)
def init(command_context: DeployCommandContext):
- print(init_help_text)
yaml = get_yaml()
return yaml.load(default_spec_file_content)
diff --git a/app/data/stacks/mainnet-laconic/stack.yml b/app/data/stacks/mainnet-laconic/stack.yml
index 51b0b8a2..e4e6781e 100644
--- a/app/data/stacks/mainnet-laconic/stack.yml
+++ b/app/data/stacks/mainnet-laconic/stack.yml
@@ -21,6 +21,7 @@ npms:
containers:
- cerc/laconicd
- cerc/laconic-registry-cli
+ - cerc/webapp-base
- cerc/laconic-console-host
pods:
- mainnet-laconicd
diff --git a/app/data/stacks/mobymask-v2/mobymask-only.md b/app/data/stacks/mobymask-v2/mobymask-only.md
index eb8f3153..ef18d835 100644
--- a/app/data/stacks/mobymask-v2/mobymask-only.md
+++ b/app/data/stacks/mobymask-v2/mobymask-only.md
@@ -39,7 +39,7 @@ Create and update an env file to be used in the next step ([defaults](../../conf
CERC_L2_NODE_HOST=
CERC_L2_NODE_PORT=
- # URL to get CSV with credentials for accounts on L1 to perform txs on L2
+ # URL (fixturenet-eth-bootnode-lighthouse) to get CSV with credentials for accounts on L1 to perform txs on L2
CERC_L1_ACCOUNTS_CSV_URL=
# OR
@@ -60,6 +60,9 @@ Create and update an env file to be used in the next step ([defaults](../../conf
# (Optional) Set of multiaddrs to be avoided while dialling
CERC_DENY_MULTIADDRS=[]
+ # (Optional) Type of pubsub to be used
+ CERC_PUBSUB=""
+
# Set to false for disabling watcher peer to send txs to L2
CERC_ENABLE_PEER_L2_TXS=true
@@ -119,4 +122,8 @@ docker volume ls -q --filter "name=mobymask_v2"
# Remove all the listed volumes
docker volume rm $(docker volume ls -q --filter "name=mobymask_v2")
+
+# WARNING: To avoid changing the watcher's peer ids, the `peers_ids` volume can be persisted
+# To delete all volumes except for `peers_ids`:
+docker volume rm $(docker volume ls -q --filter "name=mobymask_v2" | grep -v "peers_ids$")
```
diff --git a/app/data/stacks/mobymask-v2/stack.yml b/app/data/stacks/mobymask-v2/stack.yml
index 53adbffa..4344d54f 100644
--- a/app/data/stacks/mobymask-v2/stack.yml
+++ b/app/data/stacks/mobymask-v2/stack.yml
@@ -6,14 +6,15 @@ repos:
- github.com/dboreham/foundry
- github.com/ethereum-optimism/optimism@v1.0.4
- github.com/ethereum-optimism/op-geth@v1.101105.2
- - github.com/cerc-io/watcher-ts@v0.2.43
- - github.com/cerc-io/mobymask-v2-watcher-ts@v0.1.2
- - github.com/cerc-io/MobyMask@v0.1.2
+ - github.com/cerc-io/watcher-ts@v0.2.56
+ - github.com/cerc-io/mobymask-v2-watcher-ts@v0.1.3
+ - github.com/cerc-io/MobyMask@v0.1.3
- github.com/cerc-io/mobymask-ui
containers:
- cerc/go-ethereum
- cerc/lighthouse
- cerc/lighthouse-cli
+ - cerc/fixturenet-eth-genesis
- cerc/fixturenet-eth-geth
- cerc/fixturenet-eth-lighthouse
- cerc/foundry
diff --git a/app/data/stacks/mobymask-v2/web-apps.md b/app/data/stacks/mobymask-v2/web-apps.md
index ade5953b..0ea62769 100644
--- a/app/data/stacks/mobymask-v2/web-apps.md
+++ b/app/data/stacks/mobymask-v2/web-apps.md
@@ -6,6 +6,12 @@ Instructions to setup and deploy MobyMask and Peer Test web apps
Prerequisite: Watcher with GQL and relay node endpoints
+Clone required repositories:
+
+```bash
+laconic-so --stack mobymask-v2 setup-repositories --include github.com/cerc-io/mobymask-ui
+```
+
Build the container images:
```bash
@@ -39,6 +45,9 @@ Create and update an env file to be used in the next step ([defaults](../../conf
# L2 Chain ID used by mobymask web-app for L2 txs
CERC_CHAIN_ID=42069
+
+ # (Optional) Type of pubsub to be used ("floodsub" | "gossipsub")
+ CERC_PUBSUB=""
```
* NOTE: If watcher is running on the host machine, use `host.docker.internal` as the hostname to access the host port
diff --git a/app/data/stacks/mobymask-v3/README.md b/app/data/stacks/mobymask-v3/README.md
new file mode 100644
index 00000000..6ea57af5
--- /dev/null
+++ b/app/data/stacks/mobymask-v3/README.md
@@ -0,0 +1,6 @@
+# MobyMask v3
+
+Instructions to set up and deploy the MobyMask v3 stack (watcher + web-app) using [laconic-stack-orchestrator](/README.md#install)
+
+* Follow [watcher.md](./watcher.md) for deploying the watcher
+* Follow [web-app.md](./web-app.md) for deploying the app
diff --git a/app/data/stacks/mobymask-v3/stack.yml b/app/data/stacks/mobymask-v3/stack.yml
new file mode 100644
index 00000000..b07b3680
--- /dev/null
+++ b/app/data/stacks/mobymask-v3/stack.yml
@@ -0,0 +1,19 @@
+version: "1.0"
+description: "MobyMask v3 stack"
+name: mobymask-v3
+repos:
+ - github.com/cerc-io/ts-nitro@v0.1.12
+ - github.com/cerc-io/watcher-ts@v0.2.57
+ - github.com/cerc-io/mobymask-v2-watcher-ts@v3 # TODO: Update after fixes
+ - github.com/cerc-io/MobyMask@v0.1.3
+ - github.com/cerc-io/mobymask-ui@v0.2.0
+containers:
+ - cerc/nitro-contracts
+ - cerc/watcher-ts
+ - cerc/watcher-mobymask-v3
+ - cerc/mobymask
+ - cerc/mobymask-ui
+pods:
+ - nitro-contracts
+ - watcher-mobymask-v3
+ - mobymask-app-v3
diff --git a/app/data/stacks/mobymask-v3/watcher.md b/app/data/stacks/mobymask-v3/watcher.md
new file mode 100644
index 00000000..1aa63c1d
--- /dev/null
+++ b/app/data/stacks/mobymask-v3/watcher.md
@@ -0,0 +1,133 @@
+# MobyMask v3 Watcher
+
+## Setup
+
+Prerequisite: L2 Optimism Geth RPC endpoint
+
+Clone required repositories:
+
+```bash
+laconic-so --stack mobymask-v3 setup-repositories --pull --exclude github.com/cerc-io/mobymask-ui
+```
+
+Build the container images:
+
+```bash
+laconic-so --stack mobymask-v3 build-containers --exclude cerc/mobymask-ui
+```
+
+## Deploy
+
+### Configuration
+
+Create and update an env file to be used in the next step ([defaults](../../config/watcher-mobymask-v3/mobymask-params.env)):
+
+ ```bash
+ # External ETH RPC endpoint (L2 Optimism geth)
+ CERC_ETH_RPC_ENDPOINT=
+
+ # External ETH RPC endpoint used for queries in the watcher
+ CERC_ETH_RPC_QUERY_ENDPOINT=
+
+ # External ETH RPC endpoint used for mutations in the watcher
+ CERC_ETH_RPC_MUTATION_ENDPOINT=
+
+ # Specify an account PK for contract deployment
+ CERC_PRIVATE_KEY_DEPLOYER=
+
+ # Base URI for mobymask-app
+ # (used for generating a root invite link after deploying the contract)
+ CERC_MOBYMASK_APP_BASE_URI="http://127.0.0.1:3004/#"
+
+ # (Optional) Domain to be used in the relay node's announce address
+ CERC_RELAY_ANNOUNCE_DOMAIN=
+
+ # (Optional) Set of relay peers to connect to from the relay node
+ CERC_RELAY_PEERS=[]
+
+ # (Optional) Set of multiaddrs to be avoided while dialling
+ CERC_DENY_MULTIADDRS=[]
+
+ # (Optional) Type of pubsub to be used
+ CERC_PUBSUB=""
+
+ # Set to false to disable the watcher peer from sending txs to L2
+ CERC_ENABLE_PEER_L2_TXS=true
+
+ # (Optional) Set already deployed MobyMask contract address to avoid deploying contract in the stack
+ CERC_DEPLOYED_CONTRACT=
+
+ # (Optional) Set already deployed Nitro addresses to avoid deploying them in the stack
+ CERC_NA_ADDRESS=
+ CERC_VPA_ADDRESS=
+ CERC_CA_ADDRESS=
+
+ # Specify private key of a funded account for sending txs to L2
+ CERC_PRIVATE_KEY_PEER=
+
+ # Specify private key for the Nitro account
+ CERC_WATCHER_NITRO_PK=
+
+ # (Optional) Set a pre-existing peer id to be used (enables consensus)
+ # Uses a generated peer id if not set (disables consensus)
+ CERC_PEER_ID=
+
+ # Disable payments to upstream ETH server
+ CERC_ENABLE_UPSTREAM_PAYMENTS=false
+ ```
+
+* NOTE: If Optimism is running on the host machine, use `host.docker.internal` as the hostname to access the host port
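+
+  For example, a minimal sketch (the port shown is illustrative, not a default of this stack):
+
+  ```bash
+  CERC_ETH_RPC_ENDPOINT="http://host.docker.internal:8545"
+  ```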
+
+### Deploy the stack
+
+```bash
+laconic-so --stack mobymask-v3 deploy --cluster mobymask_v3 --exclude mobymask-app-v3 --env-file <PATH_TO_ENV_FILE> up
+```
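+
+For example, if the env file created above was saved as `mobymask-watcher.env` (an illustrative name, not mandated by the stack):
+
+```bash
+laconic-so --stack mobymask-v3 deploy --cluster mobymask_v3 --exclude mobymask-app-v3 --env-file mobymask-watcher.env up
+```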
+
+* To list and monitor the running containers:
+
+ ```bash
+ laconic-so --stack mobymask-v3 deploy --cluster mobymask_v3 --exclude mobymask-app-v3 ps
+
+ # With status
+ docker ps -a
+
+ # Check logs for a container
+ docker logs -f <CONTAINER_ID>
+ ```
+
+* The watcher endpoint is exposed on host port `3001` and the relay node endpoint is exposed on host port `9090`
+
+* Check the logs of the MobyMask contract deployment container to get the deployed contract's address and generated root invite link:
+
+ ```bash
+ docker logs -f $(docker ps -aq --filter name="mobymask-1")
+ ```
+
+* Check logs of the Nitro contracts container to get the deployed Nitro contracts' addresses:
+
+ ```bash
+ docker exec -it $(docker ps -q --filter name="nitro-contracts") bash -c "cat /app/deployment/nitro-addresses.json"
+ ```
+
+## Clean up
+
+Stop all services running in the background:
+
+```bash
+laconic-so --stack mobymask-v3 deploy --cluster mobymask_v3 --exclude mobymask-app-v3 down
+```
+
+Clear volumes created by this stack:
+
+```bash
+# List all relevant volumes
+docker volume ls -q --filter "name=mobymask_v3"
+
+# Remove all the listed volumes
+docker volume rm $(docker volume ls -q --filter "name=mobymask_v3")
+
+# WARNING: To avoid changing the watcher's peer ids, the `peers_ids` volume can be persisted
+# To delete all volumes except for `peers_ids`:
+docker volume rm $(docker volume ls -q --filter "name=mobymask_v3" | grep -v "peers_ids$")
+```
diff --git a/app/data/stacks/mobymask-v3/web-app.md b/app/data/stacks/mobymask-v3/web-app.md
new file mode 100644
index 00000000..12674cee
--- /dev/null
+++ b/app/data/stacks/mobymask-v3/web-app.md
@@ -0,0 +1,89 @@
+# MobyMask v3 App
+
+## Setup
+
+Prerequisite: Watcher with GQL and relay node endpoints
+
+Clone required repositories:
+
+```bash
+laconic-so --stack mobymask-v3 setup-repositories --pull --include github.com/cerc-io/mobymask-ui
+```
+
+Build the container images:
+
+```bash
+laconic-so --stack mobymask-v3 build-containers --include cerc/mobymask-ui
+```
+
+## Deploy
+
+### Configuration
+
+Create and update an env file to be used in the next step ([defaults](../../config/watcher-mobymask-v3/mobymask-params.env)):
+
+ ```bash
+ # Set of relay nodes to be used by the web-app
+ # (use double quotes " for strings, avoid space after commas)
+ # Eg. CERC_RELAY_NODES=["/dns4/example.com/tcp/443/wss/p2p/12D3KooWGHmDDCc93XUWL16FMcTPCGu2zFaMkf67k8HZ4gdQbRDr"]
+ CERC_RELAY_NODES=[]
+
+ # Set of multiaddrs to be avoided while dialling
+ CERC_DENY_MULTIADDRS=[]
+
+ # Also add if running MobyMask app:
+
+ # Watcher endpoint used by the app for GQL queries
+ CERC_APP_WATCHER_URL="http://127.0.0.1:3001"
+
+ # Set deployed MobyMask contract address to be used in MobyMask app's config
+ CERC_DEPLOYED_CONTRACT=
+
+ # L2 Chain ID used by mobymask web-app for L2 txs
+ CERC_CHAIN_ID=42069
+
+ # (Optional) Type of pubsub to be used ("floodsub" | "gossipsub")
+ CERC_PUBSUB=""
+
+ # (Optional) Set of direct peers to be used when pubsub is set to gossipsub
+ CERC_GOSSIPSUB_DIRECT_PEERS=[]
+
+ # Set Nitro addresses
+ CERC_NA_ADDRESS=
+ CERC_VPA_ADDRESS=
+ CERC_CA_ADDRESS=
+
+ # Nitro account address to make the query and mutation payments to
+ CERC_PAYMENT_NITRO_ADDRESS=
+
+ # (Optional) Endpoint for Mobymask snap installation
+ CERC_SNAP_URL=
+ ```
+
+### Deploy the stack
+
+```bash
+laconic-so --stack mobymask-v3 deploy --cluster mobymask_v3 --include mobymask-app-v3 --env-file <PATH_TO_ENV_FILE> up
+
+# Runs the MobyMask v3 app on host port 3004
+```
+
+To list and monitor the running containers:
+
+```bash
+laconic-so --stack mobymask-v3 deploy --cluster mobymask_v3 --include mobymask-app-v3 ps
+
+# With status
+docker ps -a
+
+# Check logs for a container
+docker logs -f <CONTAINER_ID>
+```
+
+## Clean up
+
+Stop all services running in the background:
+
+```bash
+laconic-so --stack mobymask-v3 deploy --cluster mobymask_v3 --include mobymask-app-v3 down
+```
diff --git a/app/data/stacks/mobymask/README.md b/app/data/stacks/mobymask/README.md
index 28d55ea1..ef222cce 100644
--- a/app/data/stacks/mobymask/README.md
+++ b/app/data/stacks/mobymask/README.md
@@ -1,45 +1,56 @@
# MobyMask
-The MobyMask watcher is a Laconic Network component that provides efficient access to MobyMask contract data from Ethereum, along with evidence allowing users to verify the correctness of that data. The watcher source code is available in [this repository](https://github.com/cerc-io/watcher-ts/tree/main/packages/mobymask-watcher) and a developer-oriented Docker Compose setup for the watcher can be found [here](https://github.com/cerc-io/mobymask-watcher). The watcher can be deployed automatically using the Laconic Stack Orchestrator tool as detailed below:
+The MobyMask watcher is a Laconic Network component that provides efficient access to MobyMask contract data from Ethereum, along with evidence allowing users to verify the correctness of that data. The watcher source code is available in [this repository](https://github.com/cerc-io/mobymask-watcher-ts) and a developer-oriented Docker Compose setup for the watcher can be found [here](https://github.com/cerc-io/mobymask-watcher). The watcher can be deployed automatically using the Laconic Stack Orchestrator tool as detailed below:
## Deploy the MobyMask Watcher
-The instructions below show how to deploy a MobyMask watcher using laconic-stack-orchestrator (the installation of which is covered [here](https://github.com/cerc-io/stack-orchestrator#user-mode)).
+The instructions below show how to deploy a MobyMask watcher using laconic-stack-orchestrator (the installation of which is covered [here](https://github.com/cerc-io/stack-orchestrator#install)).
This deployment expects that ipld-eth-server's endpoints are available on the local machine at http://ipld-eth-server.example.com:8083/graphql and http://ipld-eth-server.example.com:8082. More advanced configurations are supported by modifying the watcher's [config file](../../config/watcher-mobymask/mobymask-watcher.toml).
## Clone required repositories
-```
-$ laconic-so setup-repositories --include github.com/cerc-io/watcher-ts
+```bash
+$ laconic-so --stack mobymask setup-repositories
```
## Build the watcher container
-```
-$ laconic-so build-containers --include cerc/watcher-mobymask
+```bash
+$ laconic-so --stack mobymask build-containers
```
This should create a container with tag `cerc/watcher-mobymask` in the local image registry.
-## Deploy the stack
+## Create a deployment
+
+```bash
+$ laconic-so --stack mobymask deploy init --output mobymask-spec.yml
+$ laconic-so deploy create --spec-file mobymask-spec.yml --deployment-dir mobymask-deployment
+```
+
+The external `ipld-eth-server` endpoint can be set in the watcher config file in the deployment directory:
+```
+mobymask-deployment/config/watcher-mobymask/mobymask-watcher.toml
+```
+
+## Start the stack
First the watcher database has to be initialized. Start only the mobymask-watcher-db service:
-```
-$ laconic-so deploy-system --include watcher-mobymask up mobymask-watcher-db
+```bash
+$ laconic-so deployment --dir mobymask-deployment start mobymask-watcher-db
```
Next find the container's id using `docker ps` then run the following command to initialize the database:
-
-```
-$ docker exec -i <CONTAINER_ID> psql -U vdbm mobymask-watcher < config/watcher-mobymask/mobymask-watcher-db.sql
+```bash
+$ docker exec -i <CONTAINER_ID> psql -U vdbm mobymask-watcher < mobymask-deployment/config/watcher-mobymask/mobymask-watcher-db.sql
```
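+
+For example, the container id can be captured inline (a sketch; the `name` filter assumes the default database service name):
+
+```bash
+$ docker exec -i $(docker ps -q --filter name=mobymask-watcher-db) psql -U vdbm mobymask-watcher < mobymask-deployment/config/watcher-mobymask/mobymask-watcher-db.sql
+```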
Finally start the remaining containers:
-```
-$ laconic-so deploy-system --include watcher-mobymask up
+```bash
+$ laconic-so deployment --dir mobymask-deployment start
```
Correct operation should be verified by following the instructions [here](https://github.com/cerc-io/mobymask-watcher/tree/main/mainnet-watcher-only#run), checking GraphQL queries return valid results in the watcher's [playground](http://127.0.0.1:3001/graphql).
@@ -49,5 +60,26 @@ Correct operation should be verified by following the instructions [here](https:
Stop all the services running in the background:
```bash
-$ laconic-so deploy-system --include watcher-mobymask down
+$ laconic-so deployment --dir mobymask-deployment stop
```
+
+## Data volumes
+
+Container data volumes are bind-mounted to specified paths in the host filesystem.
+The default setup (generated by `laconic-so deploy init`) places the volumes in the `./data` subdirectory of the deployment directory:
+```
+$ cat mobymask-spec.yml
+stack: mobymask
+ports:
+ mobymask-watcher-db:
+ - 0.0.0.0:15432:5432
+ mobymask-watcher-job-runner:
+ - 0.0.0.0:9000:9000
+ mobymask-watcher-server:
+ - 0.0.0.0:3001:3001
+ - 0.0.0.0:9001:9001
+volumes:
+ mobymask_watcher_db_data: ./data/mobymask_watcher_db_data
+```
+
+The volume directory paths can be changed in the spec file before running `laconic-so deploy create`.
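+
+For example (a sketch; the target path is illustrative), edit the volume mapping and then create the deployment:
+
+```bash
+sed -i 's|./data/mobymask_watcher_db_data|/srv/mobymask/db-data|' mobymask-spec.yml
+laconic-so deploy create --spec-file mobymask-spec.yml --deployment-dir mobymask-deployment
+```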
diff --git a/app/data/stacks/mobymask/stack.yml b/app/data/stacks/mobymask/stack.yml
index 12799821..22a30a0f 100644
--- a/app/data/stacks/mobymask/stack.yml
+++ b/app/data/stacks/mobymask/stack.yml
@@ -1,7 +1,7 @@
version: "1.0"
name: mobymask-watcher
repos:
- - github.com/cerc-io/watcher-ts/v0.2.19
+ - github.com/cerc-io/mobymask-watcher-ts@v0.1.0
containers:
- cerc/watcher-mobymask
pods:
diff --git a/app/data/stacks/package-registry/stack.yml b/app/data/stacks/package-registry/stack.yml
index cadc3e97..9d75925f 100644
--- a/app/data/stacks/package-registry/stack.yml
+++ b/app/data/stacks/package-registry/stack.yml
@@ -1,6 +1,6 @@
version: "1.1"
name: package-registry
-decription: "Local Package Registry"
+description: "Local Package Registry"
repos:
- github.com/cerc-io/hosting
- gitea.com/gitea/act_runner
diff --git a/app/data/stacks/reth/stack.yml b/app/data/stacks/reth/stack.yml
index 7d2278db..4ff8b60d 100644
--- a/app/data/stacks/reth/stack.yml
+++ b/app/data/stacks/reth/stack.yml
@@ -1,6 +1,6 @@
version: "1.1"
name: reth
-decription: "Reth node"
+description: "Reth node"
repos:
- github.com/paradigmxyz/reth
containers:
diff --git a/app/data/stacks/sushiswap-subgraph/README.md b/app/data/stacks/sushiswap-subgraph/README.md
index b341a1a3..52433a9c 100644
--- a/app/data/stacks/sushiswap-subgraph/README.md
+++ b/app/data/stacks/sushiswap-subgraph/README.md
@@ -1,11 +1,27 @@
-# SushiSwap Graph
+# SushiSwap Subgraph
## Setup
Clone required repositories:
```bash
-laconic-so --stack sushiswap-subgraph setup-repositories
+laconic-so --stack sushiswap-subgraph setup-repositories --pull
+```
+
+Check out a non-default branch in the cloned repos if required:
+
+```bash
+# Default repo base dir
+cd ~/cerc
+
+# Example
+cd graph-node
+git checkout <BRANCH_NAME> && git pull
+
+# Remove the corresponding docker image if it already exists
+docker image rm cerc/graph-node:local
+# Remove any dangling images
+docker image prune
```
Build the container images:
@@ -14,98 +30,141 @@ Build the container images:
laconic-so --stack sushiswap-subgraph build-containers
```
-## Deploy
+## Create a deployment
+
+Initialize deployment and create "spec" file:
+
+```bash
+laconic-so --stack sushiswap-subgraph deploy init --output sushiswap-subgraph-spec.yml
+```
+
+We need to assign a fixed port `8000` for the graph-node subgraph GQL endpoint. The values can be
+customized by editing the "spec" file generated by `laconic-so deploy init`.
+```
+$ cat sushiswap-subgraph-spec.yml
+stack: sushiswap-subgraph
+ports:
+ graph-node:
+ - '8000:8000'
+ - '8001'
+ - '8020'
+ - '8030'
+...
+```
+
+Create deployment:
+
+```bash
+laconic-so deploy create --spec-file sushiswap-subgraph-spec.yml --deployment-dir sushiswap-subgraph-deployment
+```
+
+## Start the stack
Deploy the stack:
```bash
-laconic-so --stack sushiswap-subgraph deploy --cluster sushigraph up
+laconic-so deployment --dir sushiswap-subgraph-deployment start
+
+# Note: Remove any existing volumes for the cluster for a fresh start
```
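+
+For a fresh start, any data left over from a previous run can be removed first, e.g. (a sketch, assuming the default `./data` bind-mount location from the spec file):
+
+```bash
+rm -rf sushiswap-subgraph-deployment/data/*
+```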
-After all services have started, wait and check that the subgraph has been deployed to graph-node
+After all services have started:
+
+* Follow `graph-node` logs:
+
+ ```bash
+ laconic-so deployment --dir sushiswap-subgraph-deployment logs -f graph-node
+ ```
+
+* Check that the subgraphs have been deployed:
+
+ ```bash
+ laconic-so deployment --dir sushiswap-subgraph-deployment logs -f sushiswap-subgraph-v3
+
+ # Expected output:
+ # .
+ # .
+ # sushigraph-sushiswap-subgraph-v3-1 | - Deploying to Graph node http://graph-node:8020/
+ # sushigraph-sushiswap-subgraph-v3-1 | Deployed to http://graph-node:8000/subgraphs/name/sushiswap/blocks/graphql
+ # sushigraph-sushiswap-subgraph-v3-1 |
+ # sushigraph-sushiswap-subgraph-v3-1 |
+ # sushigraph-sushiswap-subgraph-v3-1 | Subgraph endpoints:
+ # sushigraph-sushiswap-subgraph-v3-1 | Queries (HTTP): http://graph-node:8000/subgraphs/name/sushiswap/blocks
+ # .
+ # .
+ # sushigraph-sushiswap-subgraph-v3-1 | - Deploying to Graph node http://graph-node:8020/
+ # sushigraph-sushiswap-subgraph-v3-1 | Deployed to http://graph-node:8000/subgraphs/name/sushiswap/v3-filecoin/graphql
+ # sushigraph-sushiswap-subgraph-v3-1 |
+ # sushigraph-sushiswap-subgraph-v3-1 |
+ # sushigraph-sushiswap-subgraph-v3-1 | Subgraph endpoints:
+ # sushigraph-sushiswap-subgraph-v3-1 | Queries (HTTP): http://graph-node:8000/subgraphs/name/sushiswap/v3-filecoin
+ # sushigraph-sushiswap-subgraph-v3-1 |
+ # sushigraph-sushiswap-subgraph-v3-1 |
+ # sushigraph-sushiswap-subgraph-v3-1 | Done
+ ```
+
+After `graph-node` has fetched the latest blocks from upstream, use the subgraph (GQL) endpoints for querying:
```bash
-laconic-so --stack sushiswap-subgraph deploy --cluster sushigraph logs -f sushiswap-subgraph-v3
+# Blocks subgraph endpoint:
+http://127.0.0.1:8000/subgraphs/name/sushiswap/blocks/graphql
-# Expected end output
-# ...
-# sushigraph-sushiswap-subgraph-v3-1 | - Deploying to Graph node http://graph-node:8020/
-# sushigraph-sushiswap-subgraph-v3-1 | Deployed to http://graph-node:8000/subgraphs/name/sushiswap/v3-lotus/graphql
-# sushigraph-sushiswap-subgraph-v3-1 |
-# sushigraph-sushiswap-subgraph-v3-1 | Subgraph endpoints:
-# sushigraph-sushiswap-subgraph-v3-1 | Queries (HTTP): http://graph-node:8000/subgraphs/name/sushiswap/v3-lotus
-# sushigraph-sushiswap-subgraph-v3-1 |
-# sushigraph-sushiswap-subgraph-v3-1 | Done
+# v3 subgraph endpoint:
+http://127.0.0.1:8000/subgraphs/name/sushiswap/v3-filecoin/graphql
```
-## Run
+## Set environment variables
-To check graph-node logs:
-```bash
-laconic-so --stack sushiswap-subgraph deploy --cluster sushigraph logs -f graph-node
-```
+* The graph-node environment variable `ETHEREUM_REORG_THRESHOLD` can be set in the deployment compose file
+ ```bash
+ $ cat sushiswap-subgraph-deployment/compose/docker-compose-graph-node.yml
+ services:
+ graph-node:
+ image: cerc/graph-node:local
+ ...
+ environment:
+ ...
+ GRAPH_LOG: debug
+ ETHEREUM_REORG_THRESHOLD: 16
+ ```
+  Change `ETHEREUM_REORG_THRESHOLD` to the desired value
-To deploy tokens run:
-```bash
-docker exec -it sushigraph-sushiswap-v3-periphery-1 yarn hardhat --network docker deploy --tags TestERC20
-```
-This can be run multiple times to deploy ERC20 tokens
-
-Take note of the deployed token addresses to use later
-
-Get contract address of factory deployed:
-```bash
-docker exec -it sushigraph-sushiswap-v3-core-1 jq -r '.address' /app/deployments/docker/UniswapV3Factory.json
-```
-Set it to environment variable `FACTORY_ADDRESS` to use later
-
-To create a pool:
-```bash
-docker exec -it sushigraph-sushiswap-v3-core-1 pnpm run pool:create:docker --factory $FACTORY_ADDRESS --token0 $TOKEN1_ADDRESS --token1 $TOKEN2_ADDRESS --fee 500
-```
-
-Set the created pool address to environment variable `POOL_ADDRESS` to use later
-
-To initialize pool:
-```bash
-docker exec -it sushigraph-sushiswap-v3-core-1 pnpm run pool:initialize:docker --sqrt-price 4295128939 --pool $POOL_ADDRESS
-```
-
-Set the recipient address to the contract deployer:
-```bash
-export RECIPIENT=0xD375B03bd3A2434A9f675bEC4Ccd68aC5e67C743
-```
-
-Trigger pool mint event:
-```bash
-docker exec -it sushigraph-sushiswap-v3-core-1 pnpm run pool:mint:docker --pool $POOL_ADDRESS --recipient $RECIPIENT --amount 10
-```
-
-Trigger pool burn event:
-```bash
-docker exec -it sushigraph-sushiswap-v3-core-1 pnpm run pool:burn:docker --pool $POOL_ADDRESS --amount 10
-```
+  * To restart graph-node with updated values, only the graph-node compose service needs to be restarted
+  * Comment out the `sushiswap-subgraph-v3` pod in stack.yml so that the subgraphs are not deployed again
+ ```bash
+ $ cat sushiswap-subgraph-deployment/stack.yml
+ version: "1.0"
+ name: sushiswap-subgraph
+ ...
+ pods:
+ - graph-node
+ # - sushiswap-subgraph-v3
+ ```
+ * Stop the stack first
+ ```bash
+ laconic-so deployment --dir sushiswap-subgraph-deployment stop
+ ```
+  * Start the stack again (the `sushiswap-subgraph-v3` pod will not be started)
+    ```bash
+ laconic-so deployment --dir sushiswap-subgraph-deployment start
+ ```
+  * To check that the environment variable has been updated in the graph-node container:
+ ```bash
+ $ laconic-so deployment --dir sushiswap-subgraph-deployment exec graph-node bash
+ root@dc4d3abe1615:/# echo $ETHEREUM_REORG_THRESHOLD
+ 16
+ ```
## Clean up
Stop all the services running in the background:
```bash
-laconic-so --stack sushiswap-subgraph deploy --cluster sushigraph down
+laconic-so deployment --dir sushiswap-subgraph-deployment stop
```
Clear volumes created by this stack:
```bash
-# List all relevant volumes
-docker volume ls -q --filter "name=sushigraph"
-
-# Remove all the listed volumes
-docker volume rm $(docker volume ls -q --filter "name=sushigraph")
-
-# WARNING: After removing volumes with Lotus params
-# They will be downloaded again on restart
-
-# To remove volumes that do not contain Lotus params
-docker volume rm $(docker volume ls -q --filter "name=sushigraph" | grep -v "params$")
+laconic-so deployment --dir sushiswap-subgraph-deployment stop --delete-volumes
```
diff --git a/app/data/stacks/sushiswap-subgraph/stack.yml b/app/data/stacks/sushiswap-subgraph/stack.yml
index e4b6915d..149ffe1e 100644
--- a/app/data/stacks/sushiswap-subgraph/stack.yml
+++ b/app/data/stacks/sushiswap-subgraph/stack.yml
@@ -1,28 +1,16 @@
version: "1.0"
name: sushiswap-subgraph
-description: "An end-to-end SushiSwap Subgraph stack"
+description: "SushiSwap Subgraph stack"
repos:
- ## fixturenet-lotus repo
- - github.com/filecoin-project/lotus
- ## graph-node repo
+ # graph-node repo
- github.com/graphprotocol/graph-node
- ## sushiswap repos
- - github.com/cerc-io/sushiswap-v3-core@watcher-ts
- - github.com/cerc-io/sushiswap-v3-periphery@watcher-ts
- ## subgraph repo
+ # sushiswap subgraph repo
- github.com/sushiswap/subgraphs
containers:
- ## fixturenet-lotus image
- - cerc/lotus
- ## fixturenet-graph-node image
+ # graph-node image
- cerc/graph-node
- ## sushiswap contract deployment images
- - cerc/sushiswap-v3-core
- - cerc/sushiswap-v3-periphery
- ## sushiswap subgraphs image
+ # sushiswap subgraphs image
- cerc/sushiswap-subgraphs
pods:
- - fixturenet-lotus
- - fixturenet-graph-node
- - contract-sushiswap
+ - graph-node
- sushiswap-subgraph-v3
diff --git a/app/data/stacks/test/deploy/commands.py b/app/data/stacks/test/deploy/commands.py
index d8fb557f..0a836037 100644
--- a/app/data/stacks/test/deploy/commands.py
+++ b/app/data/stacks/test/deploy/commands.py
@@ -1,4 +1,4 @@
-# Copyright © 2022, 2023 Cerc
+# Copyright © 2022, 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -14,22 +14,19 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from app.util import get_yaml
-from app.deploy_types import DeployCommandContext, DeploymentContext
+from app.deploy_types import DeployCommandContext
from app.stack_state import State
from app.deploy_util import VolumeMapping, run_container_command
-import os
from pathlib import Path
default_spec_file_content = """config:
config_variable: test-value
"""
-init_help_text = """Add helpful text here on setting config variables.
-"""
# Output a known string to a known file in the bind-mounted directory ./container-output-dir
# for test purposes -- test checks that the file was written.
-def setup(command_context: DeployCommandContext, extra_args):
+def setup(command_context: DeployCommandContext, parameters, extra_args):
host_directory = "./container-output-dir"
host_directory_absolute = Path(extra_args[0]).absolute().joinpath(host_directory)
host_directory_absolute.mkdir(parents=True, exist_ok=True)
@@ -40,12 +37,11 @@ def setup(command_context: DeployCommandContext, extra_args):
def init(command_context: DeployCommandContext):
- print(init_help_text)
yaml = get_yaml()
return yaml.load(default_spec_file_content)
-def create(command_context: DeployCommandContext):
+def create(command_context: DeployCommandContext, extra_args):
data = "create-command-output-data"
output_file_path = command_context.deployment_dir.joinpath("create-file")
with open(output_file_path, 'w+') as output_file:
diff --git a/app/deploy.py b/app/deploy.py
index 137935fd..51749ff9 100644
--- a/app/deploy.py
+++ b/app/deploy.py
@@ -1,4 +1,4 @@
-# Copyright © 2022, 2023 Cerc
+# Copyright © 2022, 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -140,8 +140,8 @@ def exec_operation(ctx, extra_args):
print(f"Running compose exec {service_name} {command_to_exec}")
try:
ctx.obj.docker.compose.execute(service_name, command_to_exec, envs=container_exec_env)
- except DockerException as error:
- print(f"container command returned error exit status")
+ except DockerException:
+ print("container command returned error exit status")
def logs_operation(ctx, tail: int, follow: bool, extra_args: str):
@@ -312,7 +312,7 @@ def _make_cluster_context(ctx, stack, include, exclude, cluster, env_file):
if ctx.verbose:
print(f"files: {compose_files}")
- return ClusterContext(cluster, compose_files, pre_start_commands, post_start_commands, cluster_config, env_file)
+ return ClusterContext(ctx, cluster, compose_files, pre_start_commands, post_start_commands, cluster_config, env_file)
def _convert_to_new_format(old_pod_array):
diff --git a/app/deploy_types.py b/app/deploy_types.py
index c6df5784..63f32762 100644
--- a/app/deploy_types.py
+++ b/app/deploy_types.py
@@ -1,4 +1,4 @@
-# Copyright © 2023 Cerc
+# Copyright © 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -17,9 +17,12 @@ from typing import List
from dataclasses import dataclass
from pathlib import Path
from python_on_whales import DockerClient
+from app.command_types import CommandOptions
+
@dataclass
class ClusterContext:
+ options: CommandOptions # TODO: this should be in its own object not stuffed in here
cluster: str
compose_files: List[str]
pre_start_commands: List[str]
@@ -45,3 +48,21 @@ class DeploymentContext:
class VolumeMapping:
host_path: str
container_path: str
+
+
+@dataclass
+class LaconicStackSetupCommand:
+ chain_id: str
+ node_moniker: str
+ key_name: str
+ initialize_network: bool
+ join_network: bool
+ create_network: bool
+ gentx_file_list: str
+ genesis_file: str
+ network_dir: str
+
+
+@dataclass
+class LaconicStackCreateCommand:
+ network_dir: str
diff --git a/app/deploy_util.py b/app/deploy_util.py
index 814f8001..2f5f0188 100644
--- a/app/deploy_util.py
+++ b/app/deploy_util.py
@@ -1,4 +1,4 @@
-# Copyright © 2022, 2023 Cerc
+# Copyright © 2022, 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -15,12 +15,11 @@
import os
from typing import List
-from dataclasses import dataclass
from app.deploy_types import DeployCommandContext, VolumeMapping
from app.util import get_parsed_stack_config, get_yaml, get_compose_file_dir
-def _container_image_from_service(stack:str, service: str):
+def _container_image_from_service(stack: str, service: str):
# Parse the compose files looking for the image name of the specified service
image_name = None
parsed_stack = get_parsed_stack_config(stack)
@@ -39,7 +38,7 @@ def _container_image_from_service(stack:str, service: str):
def _volumes_to_docker(mounts: List[VolumeMapping]):
-# Example from doc: [("/", "/host"), ("/etc/hosts", "/etc/hosts", "rw")]
+ # Example from doc: [("/", "/host"), ("/etc/hosts", "/etc/hosts", "rw")]
result = []
for mount in mounts:
docker_volume = (mount.host_path, mount.container_path)
@@ -51,6 +50,13 @@ def run_container_command(ctx: DeployCommandContext, service: str, command: str,
docker = ctx.docker
container_image = _container_image_from_service(ctx.stack, service)
docker_volumes = _volumes_to_docker(mounts)
- docker_output = docker.run(container_image, ["-c", command], entrypoint="bash", volumes=docker_volumes)
+ if ctx.cluster_context.options.debug:
+ print(f"Running this command in {service} container: {command}")
+ docker_output = docker.run(
+ container_image,
+ ["-c", command], entrypoint="sh",
+ user=f"{os.getuid()}:{os.getgid()}",
+ volumes=docker_volumes
+ )
# There doesn't seem to be a way to get an exit code from docker.run()
return (docker_output, 0)
diff --git a/app/deployment.py b/app/deployment.py
index 39a4ed1c..8c860e46 100644
--- a/app/deployment.py
+++ b/app/deployment.py
@@ -1,4 +1,4 @@
-# Copyright © 2022, 2023 Cerc
+# Copyright © 2022, 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -17,8 +17,8 @@ import click
from dataclasses import dataclass
from pathlib import Path
import sys
-from app.deploy import up_operation, down_operation, ps_operation, port_operation, exec_operation, logs_operation, create_deploy_context
-from app.util import global_options
+from app.deploy import up_operation, down_operation, ps_operation, port_operation
+from app.deploy import exec_operation, logs_operation, create_deploy_context
@dataclass
@@ -30,6 +30,8 @@ class DeploymentContext:
@click.option("--dir", required=True, help="path to deployment directory")
@click.pass_context
def command(ctx, dir):
+    '''manage a deployment'''
+
# Check that --stack wasn't supplied
if ctx.parent.obj.stack:
print("Error: --stack can't be supplied with the deployment command")
diff --git a/app/deployment_create.py b/app/deployment_create.py
index 16202284..837aaa61 100644
--- a/app/deployment_create.py
+++ b/app/deployment_create.py
@@ -1,4 +1,4 @@
-# Copyright © 2022, 2023 Cerc
+# Copyright © 2022, 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -14,19 +14,21 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import click
-from dataclasses import dataclass
from importlib import util
import os
from pathlib import Path
+import random
from shutil import copyfile, copytree
import sys
-from app.util import get_stack_file_path, get_parsed_deployment_spec, get_parsed_stack_config, global_options, get_yaml, get_compose_file_dir
-from app.deploy_types import DeploymentContext, DeployCommandContext
+from app.util import get_stack_file_path, get_parsed_deployment_spec, get_parsed_stack_config, global_options, get_yaml
+from app.util import get_compose_file_dir
+from app.deploy_types import DeploymentContext, LaconicStackSetupCommand
def _make_default_deployment_dir():
return "deployment-001"
+
def _get_ports(stack):
ports = {}
parsed_stack = get_parsed_stack_config(stack)
@@ -39,9 +41,10 @@ def _get_ports(stack):
for svc_name, svc in parsed_pod_file["services"].items():
if "ports" in svc:
# Ports can appear as strings or numbers. We normalize them as strings.
- ports[svc_name] = [ str(x) for x in svc["ports"] ]
+ ports[svc_name] = [str(x) for x in svc["ports"]]
return ports
+
def _get_named_volumes(stack):
# Parse the compose files looking for named volumes
named_volumes = []
@@ -76,30 +79,30 @@ def _create_bind_dir_if_relative(volume, path_string, compose_dir):
# See: https://stackoverflow.com/questions/45699189/editing-docker-compose-yml-with-pyyaml
def _fixup_pod_file(pod, spec, compose_dir):
- # Fix up volumes
- if "volumes" in spec:
- spec_volumes = spec["volumes"]
- if "volumes" in pod:
- pod_volumes = pod["volumes"]
- for volume in pod_volumes.keys():
- if volume in spec_volumes:
- volume_spec = spec_volumes[volume]
- volume_spec_fixedup = volume_spec if Path(volume_spec).is_absolute() else f".{volume_spec}"
- _create_bind_dir_if_relative(volume, volume_spec, compose_dir)
- new_volume_spec = {"driver": "local",
- "driver_opts": {
+ # Fix up volumes
+ if "volumes" in spec:
+ spec_volumes = spec["volumes"]
+ if "volumes" in pod:
+ pod_volumes = pod["volumes"]
+ for volume in pod_volumes.keys():
+ if volume in spec_volumes:
+ volume_spec = spec_volumes[volume]
+ volume_spec_fixedup = volume_spec if Path(volume_spec).is_absolute() else f".{volume_spec}"
+ _create_bind_dir_if_relative(volume, volume_spec, compose_dir)
+ new_volume_spec = {"driver": "local",
+ "driver_opts": {
"type": "none",
"device": volume_spec_fixedup,
"o": "bind"
}
- }
- pod["volumes"][volume] = new_volume_spec
- # Fix up ports
- if "ports" in spec:
- spec_ports = spec["ports"]
- for container_name, container_ports in spec_ports.items():
- if container_name in pod["services"]:
- pod["services"][container_name]["ports"] = container_ports
+ }
+ pod["volumes"][volume] = new_volume_spec
+ # Fix up ports
+ if "ports" in spec:
+ spec_ports = spec["ports"]
+ for container_name, container_ports in spec_ports.items():
+ if container_name in pod["services"]:
+ pod["services"][container_name]["ports"] = container_ports
def call_stack_deploy_init(deploy_command_context):
@@ -107,34 +110,43 @@ def call_stack_deploy_init(deploy_command_context):
# Call a function in it
# If no function found, return None
python_file_path = get_stack_file_path(deploy_command_context.stack).parent.joinpath("deploy", "commands.py")
- spec = util.spec_from_file_location("commands", python_file_path)
- imported_stack = util.module_from_spec(spec)
- spec.loader.exec_module(imported_stack)
- return imported_stack.init(deploy_command_context)
+ if python_file_path.exists():
+ spec = util.spec_from_file_location("commands", python_file_path)
+ imported_stack = util.module_from_spec(spec)
+ spec.loader.exec_module(imported_stack)
+ return imported_stack.init(deploy_command_context)
+ else:
+ return None
# TODO: fold this with function above
-def call_stack_deploy_setup(deploy_command_context, extra_args):
+def call_stack_deploy_setup(deploy_command_context, parameters: LaconicStackSetupCommand, extra_args):
# Link with the python file in the stack
# Call a function in it
# If no function found, return None
python_file_path = get_stack_file_path(deploy_command_context.stack).parent.joinpath("deploy", "commands.py")
- spec = util.spec_from_file_location("commands", python_file_path)
- imported_stack = util.module_from_spec(spec)
- spec.loader.exec_module(imported_stack)
- return imported_stack.setup(deploy_command_context, extra_args)
+ if python_file_path.exists():
+ spec = util.spec_from_file_location("commands", python_file_path)
+ imported_stack = util.module_from_spec(spec)
+ spec.loader.exec_module(imported_stack)
+ return imported_stack.setup(deploy_command_context, parameters, extra_args)
+ else:
+ return None
# TODO: fold this with function above
-def call_stack_deploy_create(deployment_context):
+def call_stack_deploy_create(deployment_context, extra_args):
# Link with the python file in the stack
# Call a function in it
# If no function found, return None
python_file_path = get_stack_file_path(deployment_context.command_context.stack).parent.joinpath("deploy", "commands.py")
- spec = util.spec_from_file_location("commands", python_file_path)
- imported_stack = util.module_from_spec(spec)
- spec.loader.exec_module(imported_stack)
- return imported_stack.create(deployment_context)
+ if python_file_path.exists():
+ spec = util.spec_from_file_location("commands", python_file_path)
+ imported_stack = util.module_from_spec(spec)
+ spec.loader.exec_module(imported_stack)
+ return imported_stack.create(deployment_context, extra_args)
+ else:
+ return None
# Inspect the pod yaml to find config files referenced in subdirectories
@@ -155,10 +167,50 @@ def _find_extra_config_dirs(parsed_pod_file, pod):
return config_dirs
+def _get_mapped_ports(stack: str, map_recipe: str):
+ port_map_recipes = ["any-variable-random", "localhost-same", "any-same", "localhost-fixed-random", "any-fixed-random"]
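+    # Examples of how a compose port entry "3001" is rewritten by each recipe
+    # (the random port number is illustrative):
+    #   localhost-same         -> "127.0.0.1:3001:3001"
+    #   any-same               -> "0.0.0.0:3001:3001"
+    #   localhost-fixed-random -> "127.0.0.1:23456:3001"
+    #   any-fixed-random       -> "0.0.0.0:23456:3001"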
+ ports = _get_ports(stack)
+ if ports:
+ # Implement any requested mapping recipe
+ if map_recipe:
+ if map_recipe in port_map_recipes:
+ for service in ports.keys():
+ ports_array = ports[service]
+ for x in range(0, len(ports_array)):
+ orig_port = ports_array[x]
+ # Strip /udp suffix if present
+ bare_orig_port = orig_port.replace("/udp", "")
+ random_port = random.randint(20000, 50000) # Beware: we're relying on luck to not collide
+ if map_recipe == "any-variable-random":
+ # This is the default so take no action
+ pass
+ elif map_recipe == "localhost-same":
+ # Replace instances of "- XX" with "- 127.0.0.1:XX"
+ ports_array[x] = f"127.0.0.1:{bare_orig_port}:{orig_port}"
+ elif map_recipe == "any-same":
+ # Replace instances of "- XX" with "- 0.0.0.0:XX"
+ ports_array[x] = f"0.0.0.0:{bare_orig_port}:{orig_port}"
+ elif map_recipe == "localhost-fixed-random":
+ # Replace instances of "- XX" with "- 127.0.0.1::XX"
+ ports_array[x] = f"127.0.0.1:{random_port}:{orig_port}"
+ elif map_recipe == "any-fixed-random":
+ # Replace instances of "- XX" with "- 0.0.0.0::XX"
+ ports_array[x] = f"0.0.0.0:{random_port}:{orig_port}"
+ else:
+ print("Error: bad map_recipe")
+ else:
+ print(f"Error: --map-ports-to-host must specify one of: {port_map_recipes}")
+ sys.exit(1)
+ return ports
+
+
@click.command()
@click.option("--output", required=True, help="Write yaml spec file here")
+@click.option("--map-ports-to-host", required=False,
+ help="Map ports to the host as one of: any-variable-random (default), "
+ "localhost-same, any-same, localhost-fixed-random, any-fixed-random")
@click.pass_context
-def init(ctx, output):
+def init(ctx, output, map_ports_to_host):
yaml = get_yaml()
stack = global_options(ctx).stack
verbose = global_options(ctx).verbose
@@ -169,9 +221,8 @@ def init(ctx, output):
if verbose:
print(f"Creating spec file for stack: {stack}")
- ports = _get_ports(stack)
- if ports:
- spec_file_content["ports"] = ports
+ ports = _get_mapped_ports(stack, map_ports_to_host)
+ spec_file_content["ports"] = ports
named_volumes = _get_named_volumes(stack)
if named_volumes:
@@ -187,8 +238,11 @@ def init(ctx, output):
@click.command()
@click.option("--spec-file", required=True, help="Spec file to use to create this deployment")
@click.option("--deployment-dir", help="Create deployment files in this directory")
+# TODO: Hack
+@click.option("--network-dir", help="Network configuration supplied in this directory")
+@click.option("--initial-peers", help="Initial set of persistent peers")
@click.pass_context
-def create(ctx, spec_file, deployment_dir):
+def create(ctx, spec_file, deployment_dir, network_dir, initial_peers):
# This function fails with a useful error message if the file doesn't exist
parsed_spec = get_parsed_deployment_spec(spec_file)
stack_name = parsed_spec['stack']
@@ -236,16 +290,26 @@ def create(ctx, spec_file, deployment_dir):
deployment_command_context = ctx.obj
deployment_command_context.stack = stack_name
deployment_context = DeploymentContext(Path(deployment_dir), deployment_command_context)
- call_stack_deploy_create(deployment_context)
+ call_stack_deploy_create(deployment_context, [network_dir, initial_peers])
+# TODO: this code should be in the stack .py files but
+# we haven't yet figured out how to integrate click across
+# the plugin boundary
@click.command()
-@click.option("--node-moniker", help="Help goes here")
-@click.option("--key-name", help="Help goes here")
-@click.option("--initialize-network", is_flag=True, default=False, help="Help goes here")
-@click.option("--join-network", is_flag=True, default=False, help="Help goes here")
-@click.option("--create-network", is_flag=True, default=False, help="Help goes here")
+@click.option("--node-moniker", help="Moniker for this node")
+@click.option("--chain-id", help="The new chain id")
+@click.option("--key-name", help="Name for new node key")
+@click.option("--gentx-files", help="List of comma-delimited gentx filenames from other nodes")
+@click.option("--genesis-file", help="Genesis file for the network")
+@click.option("--initialize-network", is_flag=True, default=False, help="Initialize phase")
+@click.option("--join-network", is_flag=True, default=False, help="Join phase")
+@click.option("--create-network", is_flag=True, default=False, help="Create phase")
+@click.option("--network-dir", help="Directory for network files")
@click.argument('extra_args', nargs=-1)
@click.pass_context
-def setup(ctx, node_moniker, key_name, initialize_network, join_network, create_network, extra_args):
- call_stack_deploy_setup(ctx.obj, extra_args)
+def setup(ctx, node_moniker, chain_id, key_name, gentx_files, genesis_file, initialize_network, join_network, create_network,
+ network_dir, extra_args):
+    parameters = LaconicStackSetupCommand(chain_id, node_moniker, key_name, initialize_network, join_network, create_network,
+                                          gentx_files, genesis_file, network_dir)
+    call_stack_deploy_setup(ctx.obj, parameters, extra_args)
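+
+# Example invocation (a sketch; the stack name and option values are illustrative):
+#   laconic-so --stack mainnet-laconic deploy setup --initialize-network \
+#     --network-dir ~/laconicd-network --chain-id laconic_1234-1 --node-moniker node1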
diff --git a/app/setup_repositories.py b/app/setup_repositories.py
index d275a986..c14dccb4 100644
--- a/app/setup_repositories.py
+++ b/app/setup_repositories.py
@@ -1,4 +1,4 @@
-# Copyright © 2022 Cerc
+# Copyright © 2022, 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -76,7 +76,7 @@ def _get_repo_current_branch_or_tag(full_filesystem_repo_path):
try:
current_repo_branch_or_tag = git.Repo(full_filesystem_repo_path).active_branch.name
is_branch = True
- except TypeError as error:
+ except TypeError:
# This means that the current ref is not a branch, so possibly a tag
# Let's try to get the tag
current_repo_branch_or_tag = git.Repo(full_filesystem_repo_path).git.describe("--tags", "--exact-match")
@@ -96,7 +96,9 @@ def process_repo(verbose, quiet, dry_run, pull, check_only, git_ssh, dev_root_pa
repoName = repo_path.split("/")[-1]
full_filesystem_repo_path = os.path.join(dev_root_path, repoName)
is_present = os.path.isdir(full_filesystem_repo_path)
- (current_repo_branch_or_tag, is_branch) = _get_repo_current_branch_or_tag(full_filesystem_repo_path) if is_present else (None, None)
+ (current_repo_branch_or_tag, is_branch) = _get_repo_current_branch_or_tag(
+ full_filesystem_repo_path
+ ) if is_present else (None, None)
if not quiet:
present_text = f"already exists active {'branch' if is_branch else 'tag'}: {current_repo_branch_or_tag}" if is_present \
else 'Needs to be fetched'
@@ -116,7 +118,7 @@ def process_repo(verbose, quiet, dry_run, pull, check_only, git_ssh, dev_root_pa
origin = git_repo.remotes.origin
origin.pull(progress=None if quiet else GitProgress())
else:
- print(f"skipping pull because this repo checked out a tag")
+ print("skipping pull because this repo checked out a tag")
else:
print("(git pull skipped)")
if not is_present:
@@ -143,7 +145,10 @@ def process_repo(verbose, quiet, dry_run, pull, check_only, git_ssh, dev_root_pa
branch_to_checkout = repo_branch
if branch_to_checkout:
- if current_repo_branch_or_tag is None or (current_repo_branch_or_tag and (current_repo_branch_or_tag != branch_to_checkout)):
+ if current_repo_branch_or_tag is None or (
+ current_repo_branch_or_tag and (
+ current_repo_branch_or_tag != branch_to_checkout)
+ ):
if not quiet:
print(f"switching to branch {branch_to_checkout} in repo {repo_path}")
git_repo = git.Repo(full_filesystem_repo_path)
diff --git a/app/stack_state.py b/app/stack_state.py
index 830a47f7..180a9084 100644
--- a/app/stack_state.py
+++ b/app/stack_state.py
@@ -1,4 +1,4 @@
-# Copyright © 2023 Cerc
+# Copyright © 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -15,6 +15,7 @@
from enum import Enum
+
class State(Enum):
CREATED = 1
CONFIGURED = 2
diff --git a/app/update.py b/app/update.py
new file mode 100644
index 00000000..9f70b06e
--- /dev/null
+++ b/app/update.py
@@ -0,0 +1,90 @@
+# Copyright © 2023 Vulcanize
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import click
+import datetime
+import filecmp
+import os
+from pathlib import Path
+import requests
+import sys
+import stat
+import shutil
+import validators
+from app.util import get_yaml
+
+
+def _download_url(url: str, file_path: Path):
+ r = requests.get(url, stream=True)
+ r.raw.decode_content = True
+ with open(file_path, 'wb') as f:
+ shutil.copyfileobj(r.raw, f)
+
+
+def _error_exit(s: str):
+ print(s)
+ sys.exit(1)
+
+
+# Note at present this probably won't work on non-Unix based OSes like Windows
+@click.command()
+@click.option("--check-only", is_flag=True, default=False, help="only check, don't update")
+@click.pass_context
+def command(ctx, check_only):
+ '''update shiv binary from a distribution url'''
+ # Get the distribution URL from config
+ config_key = 'distribution-url'
+ config_file_path = Path(os.path.expanduser("~/.laconic-so/config.yml"))
+ if not config_file_path.exists():
+ _error_exit(f"Error: Config file: {config_file_path} not found")
+ yaml = get_yaml()
+ config = yaml.load(open(config_file_path, "r"))
+ if "distribution-url" not in config:
+ _error_exit(f"Error: {config_key} not defined in {config_file_path}")
+ distribution_url = config[config_key]
+ # Sanity check the URL
+ if not validators.url(distribution_url):
+ _error_exit(f"ERROR: distribution url: {distribution_url} is not valid")
+ # Figure out the filename for ourselves
+ shiv_binary_path = Path(sys.argv[0])
+ timestamp_filename = f"laconic-so-download-{datetime.datetime.now().strftime('%y%m%d-%H%M%S')}"
+ temp_download_path = shiv_binary_path.parent.joinpath(timestamp_filename)
+ # Download the file to a temp filename
+ if ctx.obj.verbose:
+ print(f"Downloading from: {distribution_url} to {temp_download_path}")
+ _download_url(distribution_url, temp_download_path)
+ # Set the executable bit
+ existing_mode = os.stat(temp_download_path)
+ os.chmod(temp_download_path, existing_mode.st_mode | stat.S_IXUSR)
+ # Switch the new file for the path we ran from
+ # Check if the downloaded file is identical to the existing one
+ same = filecmp.cmp(temp_download_path, shiv_binary_path)
+ if same:
+ if not ctx.obj.quiet or check_only:
+ print("No update available, latest version already installed")
+ else:
+ if not ctx.obj.quiet:
+ print("Update available")
+ if check_only:
+ if not ctx.obj.quiet:
+                print("Check-only mode, update not installed")
+ else:
+ if not ctx.obj.quiet:
+ print("Installing...")
+ if ctx.obj.verbose:
+ print(f"Replacing: {shiv_binary_path} with {temp_download_path}")
+ os.replace(temp_download_path, shiv_binary_path)
+ if not ctx.obj.quiet:
+ print("Run \"laconic-so version\" to see the newly installed version")
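+
+# Example (a sketch): check whether an update is available without installing it:
+#   laconic-so update --check-only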
diff --git a/app/util.py b/app/util.py
index 42a4673e..9d9eaa33 100644
--- a/app/util.py
+++ b/app/util.py
@@ -1,4 +1,4 @@
-# Copyright © 2022, 2023 Cerc
+# Copyright © 2022, 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
diff --git a/app/version.py b/app/version.py
index 7af18dc1..5a5c33d4 100644
--- a/app/version.py
+++ b/app/version.py
@@ -1,4 +1,4 @@
-# Copyright © 2023 Cerc
+# Copyright © 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -16,6 +16,7 @@
import click
import importlib.resources
+
@click.command()
@click.pass_context
def command(ctx):
diff --git a/cli.py b/cli.py
index e9a11d25..63823715 100644
--- a/cli.py
+++ b/cli.py
@@ -1,4 +1,4 @@
-# Copyright © 2022 Cerc
+# Copyright © 2022, 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -14,29 +14,19 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import click
-from dataclasses import dataclass
+from app.command_types import CommandOptions
from app import setup_repositories
from app import build_containers
from app import build_npms
from app import deploy
from app import version
from app import deployment
+from app import update
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
-@dataclass
-class Options:
- stack: str
- quiet: bool = False
- verbose: bool = False
- dry_run: bool = False
- local_stack: bool = False
- debug: bool = False
- continue_on_error: bool = False
-
-
@click.group(context_settings=CONTEXT_SETTINGS)
@click.option('--stack', help="specify a stack to build/deploy")
@click.option('--quiet', is_flag=True, default=False)
@@ -49,7 +39,7 @@ class Options:
@click.pass_context
def cli(ctx, stack, quiet, verbose, dry_run, local_stack, debug, continue_on_error):
"""Laconic Stack Orchestrator"""
- ctx.obj = Options(stack, quiet, verbose, dry_run, local_stack, debug, continue_on_error)
+ ctx.obj = CommandOptions(stack, quiet, verbose, dry_run, local_stack, debug, continue_on_error)
cli.add_command(setup_repositories.command, "setup-repositories")
@@ -59,3 +49,4 @@ cli.add_command(deploy.command, "deploy") # deploy is an alias for deploy-syste
cli.add_command(deploy.command, "deploy-system")
cli.add_command(deployment.command, "deployment")
cli.add_command(version.command, "version")
+cli.add_command(update.command, "update")
diff --git a/docs/README.md b/docs/README.md
new file mode 100644
index 00000000..a3146301
--- /dev/null
+++ b/docs/README.md
@@ -0,0 +1,8 @@
+# Stack Orchestrator
+
+Here you will find information about the design of stack orchestrator, how to contribute to it, and how to deploy services/applications that combine two or more "stacks".
+
+Most "stacks" contain their own README which has plenty of information on deploying, but stacks can be combined in a variety of ways which are document here, for example:
+
+- [Gitea with Laconicd Fixturenet](./gitea-with-laconicd-fixturenet.md)
+- [Laconicd Registry with Console](./laconicd-with-console.md)
diff --git a/docs/adding-a-new-stack.md b/docs/adding-a-new-stack.md
new file mode 100644
index 00000000..4fbf27b2
--- /dev/null
+++ b/docs/adding-a-new-stack.md
@@ -0,0 +1,71 @@
+# Adding a new stack
+
+See [this PR](https://github.com/cerc-io/stack-orchestrator/pull/434) for an example of how to add a minimal stack to stack orchestrator. The [reth stack](https://github.com/cerc-io/stack-orchestrator/pull/435) is another good example.
+
+For external developers, we recommend forking this repo and adding your stack directly to your fork. This initially requires running in "developer mode" as described [here](/docs/CONTRIBUTING.md). Check out the [Namada stack](https://github.com/vknowable/stack-orchestrator/blob/main/app/data/stacks/public-namada/digitalocean_quickstart.md) from Knowable to see how that is done.
+
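+For reference, a typical fork-and-develop flow might look like this (a sketch only; the fork URL is a placeholder, and the editable install is an assumption - see [CONTRIBUTING.md](/docs/CONTRIBUTING.md) for the authoritative steps):
+
+```bash
+# Clone your fork (hypothetical URL)
+git clone https://github.com/my-org/stack-orchestrator.git
+cd stack-orchestrator
+# Install in editable mode so local changes take effect immediately
+python3 -m pip install --editable .
+```
+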
+Core to the feature completeness of stack orchestrator is [decoupling the tool functionality from the payload](https://github.com/cerc-io/stack-orchestrator/issues/315), after which forking will no longer be required to add a stack.
+
+## Example
+
+- in `app/data/stacks/my-new-stack/stack.yml` add:
+
+```yaml
+version: "0.1"
+name: my-new-stack
+repos:
+  - github.com/my-org/my-new-stack
+containers:
+  - cerc/my-new-stack
+pods:
+  - my-new-stack
+```
+
+- in `app/data/container-build/cerc-my-new-stack/build.sh` add:
+
+```bash
+#!/usr/bin/env bash
+# Build the my-new-stack image
+source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
+docker build -t cerc/my-new-stack:local -f ${CERC_REPO_BASE_DIR}/my-new-stack/Dockerfile ${build_command_args} ${CERC_REPO_BASE_DIR}/my-new-stack
+```
+
+- in `app/data/compose/docker-compose-my-new-stack.yml` add:
+
+```yaml
+version: "3.2"
+
+services:
+  my-new-stack:
+    image: cerc/my-new-stack:local
+    restart: always
+    ports:
+      - "0.0.0.0:3000:3000"
+```
+
+- in `app/data/repository-list.txt` add:
+
+```bash
+github.com/my-org/my-new-stack
+```
+where that repository contains your source code and a `Dockerfile`, and matches the `repos:` field in the `stack.yml` above.
+
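+For illustration, a minimal `Dockerfile` in that repository might look like the following (a hypothetical sketch; the base image, build steps, and start command depend entirely on your application):
+
+```dockerfile
+# Hypothetical Node.js app listening on port 3000 (matches the compose example above)
+FROM node:18-alpine
+WORKDIR /app
+COPY . .
+RUN npm install
+CMD ["npm", "start"]
+```
+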
+- in `app/data/container-image-list.txt` add:
+
+```bash
+cerc/my-new-stack
+```
+
+- in `app/data/pod-list.txt` add:
+
+```bash
+my-new-stack
+```
+
+Now, the following commands will fetch, build, and deploy your app:
+
+```bash
+laconic-so --stack my-new-stack setup-repositories
+laconic-so --stack my-new-stack build-containers
+laconic-so --stack my-new-stack deploy-system up
+```
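+
+Once the stack is up, you can check on it like so (a suggested sanity check; the `curl` target assumes the port mapping from the compose example above):
+
+```bash
+# Tail the service logs
+laconic-so --stack my-new-stack deploy logs
+# The example compose file publishes port 3000
+curl http://localhost:3000
+```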
diff --git a/docs/cli.md b/docs/cli.md
index 287d6c68..e9f06108 100644
--- a/docs/cli.md
+++ b/docs/cli.md
@@ -6,7 +6,7 @@ Sub-commands and flags
Clone a single repository:
```
-$ laconic-so setup-repositories --include cerc-io/go-ethereum
+$ laconic-so setup-repositories --include github.com/cerc-io/go-ethereum
```
Clone the repositories for a stack:
```
diff --git a/docs/gitea-with-laconicd-fixturenet.md b/docs/gitea-with-laconicd-fixturenet.md
new file mode 100644
index 00000000..f0b3e804
--- /dev/null
+++ b/docs/gitea-with-laconicd-fixturenet.md
@@ -0,0 +1,80 @@
+# Gitea x NPMs x Laconicd
+
+Deploy a local Gitea server, publish npm packages to it, then use those packages to build a Laconicd fixturenet. This demonstrates several components of the Laconic stack.
+
+### Build and Deploy Gitea
+
+```bash
+laconic-so --stack build-support build-containers
+laconic-so --stack package-registry setup-repositories
+laconic-so --stack package-registry build-containers
+laconic-so --stack package-registry deploy up
+```
+
+These commands can take a while. Eventually, some instructions and a token will be printed. Set `CERC_NPM_AUTH_TOKEN`:
+
+```bash
+export CERC_NPM_AUTH_TOKEN=<your-token>
+```
+
+### Configure the hostname gitea.local
+
+How to do this depends on your operating system but usually involves editing a `hosts` file. For example, on Linux add this line to the file `/etc/hosts` (needs sudo):
+
+```bash
+127.0.0.1 gitea.local
+```
+
+Test with:
+
+```bash
+ping gitea.local
+```
+
+```bash
+PING gitea.local (127.0.0.1) 56(84) bytes of data.
+64 bytes from localhost (127.0.0.1): icmp_seq=1 ttl=64 time=0.147 ms
+64 bytes from localhost (127.0.0.1): icmp_seq=2 ttl=64 time=0.033 ms
+```
+
+Although not necessary for building and publishing packages, you can now access the Gitea web interface at [http://gitea.local:3000](http://gitea.local:3000) using the credentials `gitea_admin/admin1234` (note: please properly secure Gitea if public internet access is allowed).
+
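+A quick way to confirm the server is responding before proceeding (an optional check; any successful HTTP response will do):
+
+```bash
+curl -I http://gitea.local:3000
+```
+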
+### Build npm Packages
+
+Clone the required repositories:
+
+```bash
+laconic-so --stack fixturenet-laconicd setup-repositories
+```
+
+Build and publish the npm packages:
+
+```bash
+laconic-so --stack fixturenet-laconicd build-npms
+```
+
+Navigate to the Gitea console, switch to the `cerc-io` user, and open the `Packages` tab to confirm that these two npm packages have been published (a command-line check is sketched below the list):
+
+- `@cerc-io/laconic-registry-cli`
+- `@cerc-io/laconic-sdk`
+
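+As an optional command-line check (a sketch; it assumes `npm` is installed locally and may require the auth token depending on package visibility), query the local registry directly:
+
+```bash
+npm view @cerc-io/laconic-sdk --registry http://gitea.local:3000/api/packages/cerc-io/npm/
+```
+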
+### Build and deploy fixturenet containers
+
+```bash
+laconic-so --stack fixturenet-laconicd build-containers
+laconic-so --stack fixturenet-laconicd deploy up
+```
+
+Check the logs:
+
+```bash
+laconic-so --stack fixturenet-laconicd deploy logs
+```
+
+### Test with the registry CLI
+
+```bash
+laconic-so --stack fixturenet-laconicd deploy exec cli "laconic cns status"
+```
+
+Try additional CLI commands, documented [here](https://github.com/cerc-io/laconic-registry-cli#operations).
diff --git a/docs/laconicd-fixturenet.md b/docs/laconicd-with-console.md
similarity index 100%
rename from docs/laconicd-fixturenet.md
rename to docs/laconicd-with-console.md
diff --git a/docs/spec.md b/docs/spec.md
index ad6ed3c9..1dc9ac62 100644
--- a/docs/spec.md
+++ b/docs/spec.md
@@ -1,7 +1,6 @@
# Specification
-(note this page is out of date)
-
+Note: this page is out of date but still useful; it will be retired once stacks are [decoupled from the tool functionality](https://github.com/cerc-io/stack-orchestrator/issues/315).
## Implementation
diff --git a/requirements.txt b/requirements.txt
index 895e677f..7f60f9dd 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,3 +6,5 @@ click>=8.1.6
PyYAML>=6.0.1
ruamel.yaml>=0.17.32
pydantic==1.10.9
+tomli==2.0.1
+validators==0.22.0
diff --git a/scripts/quick-install-linux.sh b/scripts/quick-install-linux.sh
index 4f4d044b..5670a403 100755
--- a/scripts/quick-install-linux.sh
+++ b/scripts/quick-install-linux.sh
@@ -5,6 +5,9 @@ fi
install_dir=~/bin
+# Skip the package install stuff if so directed
+if [[ -z "$CERC_SO_INSTALL_SKIP_PACKAGES" ]]; then
+
# First display a reasonable warning to the user unless run with -y
if ! [[ $# -eq 1 && $1 == "-y" ]]; then
echo "**************************************************************************************"
@@ -30,6 +33,21 @@ fi
# Determine if we are on Debian or Ubuntu
linux_distro=$(lsb_release -a 2>/dev/null | grep "^Distributor ID:" | cut -f 2)
+# Some systems don't have lsb_release installed (e.g. ChromeOS) and so we try to
+# use /etc/os-release instead
+if [[ -z "$linux_distro" ]]; then
+  if [[ -f "/etc/os-release" ]]; then
+    distro_name_string=$(grep "^NAME=" /etc/os-release | cut -d '=' -f 2)
+    if [[ $distro_name_string =~ Debian ]]; then
+      linux_distro="Debian"
+    elif [[ $distro_name_string =~ Ubuntu ]]; then
+      linux_distro="Ubuntu"
+    fi
+  else
+    echo "Failed to identify distro: /etc/os-release doesn't exist"
+    exit 1
+  fi
+fi
case $linux_distro in
Debian)
echo "Installing docker for Debian"
@@ -113,13 +131,20 @@ sudo apt -y install docker-ce docker-ce-cli containerd.io docker-buildx-plugin d
# Allow the current user to use Docker
sudo usermod -aG docker $USER
+# End of long if block: Skip the package install stuff if so directed
+fi
+
echo "**************************************************************************************"
echo "Installing laconic-so"
# install latest `laconic-so`
+distribution_url=https://github.com/cerc-io/stack-orchestrator/releases/latest/download/laconic-so
install_filename=${install_dir}/laconic-so
mkdir -p ${install_dir}
-curl -L -o ${install_filename} https://github.com/cerc-io/stack-orchestrator/releases/latest/download/laconic-so
+curl -L -o ${install_filename} ${distribution_url}
chmod +x ${install_filename}
+# Set up config file for self-update feature
+mkdir -p ~/.laconic-so
+echo "distribution-url: ${distribution_url}" > ~/.laconic-so/config.yml
echo "**************************************************************************************"
# Check if our PATH line is already there
diff --git a/setup.py b/setup.py
index 562914eb..86050fbc 100644
--- a/setup.py
+++ b/setup.py
@@ -19,8 +19,7 @@ setup(
    install_requires=[requirements],
    python_requires='>=3.7',
    include_package_data=True,
-    # See: https://github.com/pypa/setuptools/issues/1806
-    package_data={'': ['data/*', 'data/*/*', 'data/*/*/*', 'data/*/*/*/*', 'data/*/*/*/*/*']},
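+    # Recursive globs in package_data require a recent setuptools (62.3.0 or later, to our knowledge)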
+    package_data={'': ['data/**']},
    classifiers=[
        "Programming Language :: Python :: 3.8",
        "Operating System :: OS Independent",
diff --git a/tests/laconic-network/run-test.sh b/tests/laconic-network/run-test.sh
new file mode 100755
index 00000000..0ee5cb0e
--- /dev/null
+++ b/tests/laconic-network/run-test.sh
@@ -0,0 +1,58 @@
+#!/bin/bash
+set -e
+if [ -n "$CERC_SCRIPT_DEBUG" ]; then
+  set -x
+fi
+
+node_count=4
+node_dir_prefix="laconic-network-dir"
+chain_id="laconic_81337-6"
+node_moniker_prefix="node"
+
+echo "Deleting any existing network directories..."
+for (( i=1 ; i<=$node_count ; i++ ));
+do
+  node_network_dir=${node_dir_prefix}${i}
+  if [[ -d $node_network_dir ]]; then
+    echo "Deleting ${node_network_dir}"
+    rm -rf ${node_network_dir}
+  fi
+done
+
+echo "Initalizing ${node_count} nodes networks..."
+for (( i=1 ; i<=$node_count ; i++ ));
+do
+  node_network_dir=${node_dir_prefix}${i}
+  node_moniker=${node_moniker_prefix}${i}
+  laconic-so --stack mainnet-laconic deploy setup --network-dir ${node_network_dir} --initialize-network --chain-id ${chain_id} --node-moniker ${node_moniker}
+done
+
+echo "Joining ${node_count} nodes to the network..."
+for (( i=1 ; i<=$node_count ; i++ ));
+do
+  node_network_dir=${node_dir_prefix}${i}
+  node_moniker=${node_moniker_prefix}${i}
+  laconic-so --stack mainnet-laconic deploy setup --network-dir ${node_network_dir} --join-network --key-name ${node_moniker}
+done
+
+echo "Merging ${node_count} nodes genesis txns..."
+gentx_files=""
+delimeter=""
+# Note: start at node 2 here because we're going to copy to node 1
+for (( i=2 ; i<=$node_count ; i++ ));
+do
+ node_network_dir=${node_dir_prefix}${i}
+ node_gentx_file=$(ls ${node_network_dir}/config/gentx/*.json)
+ gentx_files+=${delimeter}${node_gentx_file}
+ delimeter=","
+done
+# Generate the genesis file on node 1
+laconic-so --stack mainnet-laconic deploy setup --network-dir ${node_dir_prefix}1 --create-network --gentx-files ${gentx_files}
+genesis_file=${node_dir_prefix}1/config/genesis.json
+# Now import the genesis file to the other nodes
+for (( i=2 ; i<=$node_count ; i++ ));
+do
+ echo "Importing genesis.json into node ${i}"
+ node_network_dir=${node_dir_prefix}${i}
+ laconic-so --stack mainnet-laconic deploy setup --network-dir ${node_network_dir} --create-network --genesis-file ${genesis_file}
+done