Update mainnet-eth stack to include auth and metrics. (#492)

Thomas E Lackey 2023-08-29 11:32:37 -05:00 committed by GitHub
parent 125af19890
commit 7f9b556f9d
21 changed files with 4406 additions and 48 deletions


@@ -0,0 +1,51 @@
version: '3.8'
services:
keycloak-db:
image: postgres:14-alpine
env_file:
- ../config/mainnet-eth-keycloak/keycloak.env
healthcheck:
test: ["CMD", "nc", "-v", "localhost", "5432"]
interval: 30s
timeout: 10s
retries: 10
start_period: 3s
volumes:
- mainnet_eth_keycloak_db:/var/lib/postgresql/data
ports:
- 5432
keycloak:
image: cerc/keycloak:local
env_file:
- ../config/mainnet-eth-keycloak/keycloak.env
environment:
JAVA_OPTS_APPEND: "-Dkeycloak.migration.action=import -Dkeycloak.migration.provider=dir -Dkeycloak.migration.dir=/import -Dkeycloak.migration.strategy=IGNORE_EXISTING"
volumes:
- ../config/mainnet-eth-keycloak/import:/import
ports:
- 8080
command: ["start"]
depends_on:
keycloak-db:
condition: service_healthy
keycloak-reg-ui:
image: cerc/keycloak-reg-ui:local
env_file:
- ../config/mainnet-eth-keycloak/keycloak.env
volumes:
- ../config/mainnet-eth-keycloak/ui:/config
ports:
- 80
keycloak-reg-api:
image: cerc/keycloak-reg-api:local
env_file:
- ../config/mainnet-eth-keycloak/keycloak.env
ports:
- 9292
volumes:
mainnet_eth_keycloak_db:
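Once this pod is up, a quick way to confirm that Keycloak imported the realm and is serving requests is the standard OpenID discovery endpoint. A minimal sketch, assuming the commands run against the deployed compose project (the cerc realm name and /auth relative path come from keycloak.env below; the host port is published dynamically):

KC_PORT=$(docker compose port keycloak 8080 | cut -d: -f2)
curl -s "http://localhost:${KC_PORT}/auth/realms/cerc/.well-known/openid-configuration"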


@@ -0,0 +1,25 @@
version: "3.2"
services:
prometheus:
restart: always
image: prom/prometheus
depends_on:
mainnet-eth-geth-1:
condition: service_healthy
env_file:
- ../config/mainnet-eth-metrics/metrics.env
volumes:
- ../config/mainnet-eth-metrics/prometheus/etc:/etc/prometheus
ports:
- "9090"
grafana:
restart: always
image: grafana/grafana
env_file:
- ../config/mainnet-eth-metrics/metrics.env
volumes:
- ../config/mainnet-eth-metrics/grafana/etc/provisioning/dashboards:/etc/grafana/provisioning/dashboards
- ../config/mainnet-eth-metrics/grafana/etc/provisioning/datasources:/etc/grafana/provisioning/datasources
- ../config/mainnet-eth-metrics/grafana/etc/dashboards:/etc/grafana/dashboards
ports:
- "3000"


@@ -6,16 +6,13 @@ services:
hostname: mainnet-eth-geth-1
cap_add:
- SYS_PTRACE
environment:
CERC_REMOTE_DEBUG: "true"
CERC_RUN_STATEDIFF: ${CERC_RUN_STATEDIFF:-detect}
CERC_STATEDIFF_DB_NODE_ID: 1
CERC_SCRIPT_DEBUG: ${CERC_SCRIPT_DEBUG}
image: cerc/go-ethereum:local
entrypoint: /bin/sh
command: -c "/opt/run-geth.sh"
env_file:
- ../config/mainnet-eth/geth.env
volumes:
- mainnet_eth_geth_1_data:/root/ethdata
- mainnet_eth_geth_1_data:/data
- mainnet_eth_config_data:/etc/mainnet-eth
- ../config/mainnet-eth/scripts/run-geth.sh:/opt/run-geth.sh
healthcheck:
@@ -25,30 +22,48 @@ services:
retries: 10
start_period: 3s
ports:
# http api
- "8545"
# ws api
- "8546"
# authrpc (engine api)
- "8551"
# p2p
- "30303"
- "30303/udp"
# debugging
- "40000"
# metrics
- "6060"
mainnet-eth-lighthouse-1:
restart: always
hostname: mainnet-eth-lighthouse-1
healthcheck:
test: ["CMD", "wget", "--tries=1", "--connect-timeout=1", "--quiet", "-O", "-", "http://localhost:8001/eth/v2/beacon/blocks/head"]
test: ["CMD", "wget", "--tries=1", "--connect-timeout=1", "--quiet", "-O", "-", "http://localhost:5052/eth/v2/beacon/blocks/head"]
interval: 30s
timeout: 10s
retries: 10
start_period: 30s
environment:
EXECUTION_ENDPOINT: "http://mainnet-eth-geth-1:8551"
LIGHTHOUSE_EXECUTION_ENDPOINT: "http://mainnet-eth-geth-1:8551"
env_file:
- ../config/mainnet-eth/lighthouse.env
image: cerc/lighthouse:local
entrypoint: /bin/sh
command: -c "/opt/run-lighthouse.sh"
volumes:
- mainnet_eth_lighthouse_1_data:/var/lighthouse-data-dir
- mainnet_eth_lighthouse_1_data:/data
- mainnet_eth_config_data:/etc/mainnet-eth
- ../config/mainnet-eth/scripts/run-lighthouse.sh:/opt/run-lighthouse.sh
ports:
- "8001"
# api
- "5052"
# metrics
- "5054"
# p2p
- "9000"
- "9000/udp"
volumes:
mainnet_eth_config_data:
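With the geth service healthy, the HTTP RPC endpoint published from container port 8545 can be exercised with any standard JSON-RPC method, for example eth_syncing. A sketch, assuming the deployed compose project:

GETH_PORT=$(docker compose port mainnet-eth-geth-1 8545 | cut -d: -f2)
curl -s -X POST -H 'Content-Type: application/json' \
  --data '{"jsonrpc":"2.0","method":"eth_syncing","params":[],"id":1}' \
  "http://localhost:${GETH_PORT}"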

File diff suppressed because it is too large


@@ -0,0 +1,29 @@
POSTGRES_DB=keycloak
POSTGRES_USER=keycloak
POSTGRES_PASSWORD=keycloak
KC_DB=postgres
KC_DB_URL_HOST=keycloak-db
KC_DB_URL_DATABASE=${POSTGRES_DB}
KC_DB_USERNAME=${POSTGRES_USER}
KC_DB_PASSWORD=${POSTGRES_PASSWORD}
KC_DB_SCHEMA=public
KC_HOSTNAME=localhost
KC_HTTP_ENABLED="true"
KC_HTTP_RELATIVE_PATH="/auth"
KC_HOSTNAME_STRICT_HTTPS="false"
KEYCLOAK_ADMIN=admin
KEYCLOAK_ADMIN_PASSWORD=admin
X_API_CHECK_REALM=cerc
X_API_CHECK_CLIENT_ID="%user_id%"
# keycloak-reg-api
CERC_KCUSERREG_LISTEN_PORT=9292
CERC_KCUSERREG_LISTEN_ADDR='0.0.0.0'
CERC_KCUSERREG_API_URL='http://keycloak:8080/auth'
CERC_KCUSERREG_REG_USER="${KEYCLOAK_ADMIN}"
CERC_KCUSERREG_REG_PW="${KEYCLOAK_ADMIN_PASSWORD}"
CERC_KCUSERREG_REG_CLIENT_ID='admin-cli'
CERC_KCUSERREG_TARGET_REALM=cerc
CERC_KCUSERREG_TARGET_GROUPS=eth
CERC_KCUSERREG_CREATE_ENABLED=true


@@ -0,0 +1,107 @@
server {
listen 80;
server_name my.example.com;
# See: https://github.com/acmesh-official/acme.sh/wiki/Stateless-Mode
# and https://datatracker.ietf.org/doc/html/rfc8555
location ~ ^/\.well-known/acme-challenge/([-_a-zA-Z0-9]+)$ {
default_type text/plain;
return 200 "$1.MY_ACCOUNT_THUMBPRINT_GOES_HERE";
}
location / {
return 301 https://$host$request_uri;
}
}
upstream geth-pool {
keepalive 100;
hash $user_id consistent;
server server-a:8545;
server server-b:8545;
server server-c:8545;
}
# self-reg happens on one server for clarity
upstream reg-ui-pool {
keepalive 100;
server server-a:8085;
}
upstream reg-api-pool {
keepalive 100;
server server-a:8086;
}
# auth uses server-a if available
upstream auth-pool {
keepalive 100;
server server-a:8080;
server server-b:8080 backup;
server server-c:8080 backup;
}
log_format upstreamlog '[$time_local] $remote_addr $user_id - $server_name $host to: $upstream_addr: $request $status upstream_response_time $upstream_response_time msec $msec request_time $request_time';
proxy_cache_path /var/cache/nginx/auth_cache levels=1 keys_zone=auth_cache:1m max_size=5m inactive=60m;
server {
listen 443 ssl http2;
server_name my.example.com;
access_log /var/log/nginx/my.example.com-access.log upstreamlog;
error_log /var/log/nginx/my.example.com-error.log;
ssl_certificate /etc/nginx/ssl/my.example.com/cert.pem;
ssl_certificate_key /etc/nginx/ssl/my.example.com/key.pem;
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root /usr/share/nginx/html;
}
#rewrite ^/?$ /newuser/;
rewrite ^/?$ https://www.example.com/;
# geth-pool ETH API
location ~ ^/v1/eth/?([^/]*)$ {
set $apiKey $1;
if ($apiKey = '') {
set $apiKey $http_X_API_KEY;
}
auth_request /auth;
auth_request_set $user_id $sent_http_x_user_id;
rewrite /.*$ / break;
client_max_body_size 3m;
client_body_buffer_size 3m;
proxy_buffer_size 32k;
proxy_buffers 16 32k;
proxy_busy_buffers_size 96k;
proxy_pass http://geth-pool;
proxy_set_header X-Original-Remote-Addr $remote_addr;
proxy_set_header X-User-Id $user_id;
}
# keycloak
location = /auth {
internal;
proxy_cache auth_cache;
proxy_cache_key "$apiKey";
proxy_cache_valid 200 300s;
proxy_cache_valid 401 30s;
proxy_pass http://auth-pool/auth/realms/cerc/check?memberOf=eth&apiKey=$apiKey;
proxy_pass_request_body off;
proxy_set_header Content-Length "";
proxy_set_header X-Original-URI $request_uri;
proxy_set_header X-Original-Remote-Addr $remote_addr;
proxy_set_header X-Original-Host $host;
}
location /newuser/ {
proxy_pass http://reg-ui-pool/;
}
location /user-api/ {
proxy_pass http://reg-api-pool/;
}
}
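With this proxy configuration, a client presents its API key either as the trailing path segment matched by the /v1/eth location or in the X-API-KEY header; nginx validates the key against Keycloak via the internal /auth subrequest and forwards the call to the geth pool. Illustrative requests (the hostname and key are placeholders):

# API key as the final path segment
curl -s -X POST -H 'Content-Type: application/json' \
  --data '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}' \
  https://my.example.com/v1/eth/MY_API_KEY

# Same call with the key supplied as a header
curl -s -X POST -H 'X-API-KEY: MY_API_KEY' -H 'Content-Type: application/json' \
  --data '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}' \
  https://my.example.com/v1/eth/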


@@ -0,0 +1,138 @@
#!/usr/bin/env python3
import argparse
import os
import sys
import psycopg
import random
from subprocess import Popen
from fabric import Connection
def dump_src_db_to_file(db_host, db_port, db_user, db_password, db_name, file_name):
command = f"pg_dump -h {db_host} -p {db_port} -U {db_user} -d {db_name} -c --inserts -f {file_name}"
my_env = os.environ.copy()
my_env["PGPASSWORD"] = db_password
print(f"Exporting from {db_host}:{db_port}/{db_name} to {file_name}... ", end="")
ret = Popen(command, shell=True, env=my_env).wait()
print("DONE")
return ret
def establish_ssh_tunnel(ssh_host, ssh_port, ssh_user, db_host, db_port):
local_port = random.randint(11000, 12000)
conn = Connection(host=ssh_host, port=ssh_port, user=ssh_user)
fw = conn.forward_local(
local_port=local_port, remote_port=db_port, remote_host=db_host
)
return conn, fw, local_port
def load_db_from_file(db_host, db_port, db_user, db_password, db_name, file_name):
connstr = "host=%s port=%s user=%s password=%s sslmode=disable dbname=%s" % (
db_host,
db_port,
db_user,
db_password,
db_name,
)
with psycopg.connect(connstr) as conn:
with conn.cursor() as cur:
print(
f"Importing from {file_name} to {db_host}:{db_port}/{db_name}... ",
end="",
)
cur.execute(open(file_name, "rt").read())
print("DONE")
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--src-dbhost", help="DB hostname", default="localhost")
parser.add_argument("--src-dbport", help="DB port", default=5432, type=int)
parser.add_argument("--src-dbuser", help="DB username", default="keycloak")
parser.add_argument("--src-dbpw", help="DB password", required=True)
parser.add_argument("--src-dbname", help="dbname", default="keycloak")
parser.add_argument(
"--dst-file", help="Destination filename", default="keycloak-mirror.sql"
)
parser.add_argument("--live-import", help="run the import", action="store_true")
parser.add_argument("--dst-dbhost", help="DB hostname", default="localhost")
parser.add_argument("--dst-dbport", help="DB port", default=5432, type=int)
parser.add_argument("--dst-dbuser", help="DB username", default="keycloak")
parser.add_argument("--dst-dbpw", help="DB password")
parser.add_argument("--dst-dbname", help="dbname", default="keycloak")
parser.add_argument("--ssh-host", help="SSH hostname")
parser.add_argument("--ssh-port", help="SSH port", default=22, type=int)
parser.add_argument("--ssh-user", help="SSH user")
args = parser.parse_args()
if args.live_import and not args.dst_dbpw:
print("--dst-dbpw is required if importing", file=sys.stderr)
sys.exit(2)
remove_sql_file = False
if args.dst_dbhost and not args.dst_file:
remove_sql_file = True
dst_file = args.dst_file
if not dst_file:
dst_file = "keycloak-mirror.sql"
dump_src_db_to_file(
args.src_dbhost,
args.src_dbport,
args.src_dbuser,
args.src_dbpw,
args.src_dbname,
dst_file,
)
if args.live_import:
try:
if args.ssh_host:
dst_dbport = random.randint(11000, 12000)
print(
f"Establishing SSH tunnel from 127.0.0.1:{dst_dbport} to {args.ssh_host}->{args.dst_dbhost}:{args.dst_dbport}... ",
end="",
)
with Connection(
host=args.ssh_host, port=args.ssh_port, user=args.ssh_user
).forward_local(
local_port=dst_dbport,
remote_port=args.dst_dbport,
remote_host=args.dst_dbhost,
):
print("DONE")
load_db_from_file(
args.dst_dbhost,
args.dst_dbport,
args.dst_dbuser,
args.dst_dbpw,
args.dst_dbname,
dst_file,
)
else:
load_db_from_file(
args.dst_dbhost,
args.dst_dbport,
args.dst_dbuser,
args.dst_dbpw,
args.dst_dbname,
dst_file,
)
finally:
if args.live_import:
print(f"Removing {dst_file}... ", end="")
os.remove(dst_file)
print("DONE")


@@ -0,0 +1,3 @@
fabric
psycopg~=3.1.8
psycopg_binary
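These requirements support the mirror script above and install in the usual way:

pip install -r requirements.txt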


@@ -0,0 +1,4 @@
web:
path: ''
api:
url: 'http://keycloak-reg-api:9292'

File diff suppressed because it is too large


@@ -0,0 +1,9 @@
apiVersion: 1
providers:
- name: dashboards
type: file
updateIntervalSeconds: 10
options:
path: /etc/grafana/dashboards
foldersFromFilesStructure: true


@@ -0,0 +1,19 @@
apiVersion: 1
datasources:
- id: 1
uid: jZUuGao4k
orgId: 1
name: Prometheus
type: prometheus
typeName: Prometheus
typeLogoUrl: public/app/plugins/datasource/prometheus/img/prometheus_logo.svg
access: proxy
url: http://prometheus:9090
user: ""
database: ""
basicAuth: false
isDefault: true
jsonData:
httpMethod: POST
readOnly: false


@@ -0,0 +1,2 @@
# grafana
GF_SECURITY_ADMIN_PASSWORD=changeme6325
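Grafana reads its admin password from the variable above (used to log in to the UI); once the container is up, the health endpoint gives a quick readiness check. A sketch against the deployed compose project:

GRAFANA_PORT=$(docker compose port grafana 3000 | cut -d: -f2)
curl -s "http://localhost:${GRAFANA_PORT}/api/health"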


@@ -0,0 +1,19 @@
global:
scrape_interval: 5s
evaluation_interval: 15s
scrape_configs:
# geth
- job_name: 'geth'
metrics_path: /debug/metrics/prometheus
scheme: http
static_configs:
- targets: ['mainnet-eth-geth-1:6060']
# keycloak
- job_name: 'keycloak'
scrape_interval: 5s
metrics_path: /auth/realms/cerc/metrics
scheme: http
static_configs:
- targets: ['keycloak:8080']
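Given the geth scrape job above, a single PromQL query over the Prometheus HTTP API shows whether that target is being scraped successfully. A sketch, with the host port looked up as before:

PROM_PORT=$(docker compose port prometheus 9090 | cut -d: -f2)
curl -s -G "http://localhost:${PROM_PORT}/api/v1/query" --data-urlencode 'query=up{job="geth"}'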


@@ -0,0 +1,57 @@
# Enable remote debugging using dlv
CERC_REMOTE_DEBUG=false
# Enable startup script debug output.
CERC_SCRIPT_DEBUG=false
# Simple toggle to choose either a 'full' node or an 'archive' node
# (controls the values of --syncmode --gcmode --snapshot)
CERC_GETH_MODE_QUICK_SET=full
# Optional custom node name.
# GETH_NODE_NAME=""
# Specify any other geth CLI options.
GETH_OPTS=""
# --cache
GETH_CACHE=1024
# --cache.database
GETH_CACHE_DB=50
# --cache.gc
GETH_CACHE_GC=25
# --cache.trie
GETH_CACHE_TRIE=15
# --datadir
GETH_DATADIR="/data"
# --http.api
GETH_HTTP_API="eth,web3,net"
# --authrpc.jwtsecret
GETH_JWTSECRET="/etc/mainnet-eth/jwtsecret"
# --maxpeers
GETH_MAX_PEERS=100
# --rpc.evmtimeout
GETH_RPC_EVMTIMEOUT=0
# --rpc.gascap
GETH_RPC_GASCAP=0
# --txlookuplimit
GETH_TXLOOKUPLIMIT=0
# --verbosity
GETH_VERBOSITY=3
# --log.vmodule
GETH_VMODULE="rpc/*=4"
# --ws.api
GETH_WS_API="eth,web3,net"
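These settings are consumed by run-geth.sh below, which maps them onto geth CLI flags. For example, switching the node to archive mode is a one-line change to the quick-set toggle (the script then picks --syncmode/--gcmode/--snapshot accordingly):

# in geth.env
CERC_GETH_MODE_QUICK_SET=archive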


@@ -0,0 +1,33 @@
# Enable startup script debug output.
CERC_SCRIPT_DEBUG=false
# Specify any other lighthouse CLI options.
LIGHTHOUSE_OPTS=""
# Override the advertised public IP (optional)
# --enr-address
#LIGHTHOUSE_ENR_ADDRESS=""
# --checkpoint-sync-url
LIGHTHOUSE_CHECKPOINT_SYNC_URL="https://beaconstate.ethstaker.cc"
# --checkpoint-sync-url-timeout
LIGHTHOUSE_CHECKPOINT_SYNC_URL_TIMEOUT=300
# --datadir
LIGHTHOUSE_DATADIR=/data
# --debug-level
LIGHTHOUSE_DEBUG_LEVEL=info
# --http-port
LIGHTHOUSE_HTTP_PORT=5052
# --execution-jwt
LIGHTHOUSE_JWTSECRET=/etc/mainnet-eth/jwtsecret
# --metrics-port
LIGHTHOUSE_METRICS_PORT=5054
# --port --enr-udp-port --enr-tcp-port
LIGHTHOUSE_NETWORK_PORT=9000


@@ -1,12 +1,10 @@
#!/bin/sh
if [[ -n "$CERC_SCRIPT_DEBUG" ]]; then
if [[ "true" == "$CERC_SCRIPT_DEBUG" ]]; then
set -x
fi
CERC_ETH_DATADIR=/root/ethdata
START_CMD="geth"
if [ "true" == "$CERC_REMOTE_DEBUG" ] && [ -x "/usr/local/bin/dlv" ]; then
if [[ "true" == "$CERC_REMOTE_DEBUG" ]] && [[ -x "/usr/local/bin/dlv" ]]; then
START_CMD="/usr/local/bin/dlv --listen=:40000 --headless=true --api-version=2 --accept-multiclient exec /usr/local/bin/geth --continue --"
fi
@@ -22,29 +20,44 @@ cleanup() {
wait
echo "Done"
}
trap 'cleanup' SIGINT SIGTERM
MODE_FLAGS=""
if [[ "$CERC_GETH_MODE_QUICK_SET" = "archive" ]]; then
MODE_FLAGS="--syncmode=${GETH_SYNC_MODE:-full} --gcmode=${GETH_GC_MODE:-archive} --snapshot=${GETH_SNAPSHOT:-false}"
else
MODE_FLAGS="--syncmode=${GETH_SYNC_MODE:-snap} --gcmode=${GETH_GC_MODE:-full} --snapshot=${GETH_SNAPSHOT:-true}"
fi
$START_CMD \
--datadir="${CERC_ETH_DATADIR}" \
--authrpc.addr="0.0.0.0" \
--authrpc.port 8551 \
--authrpc.vhosts="*" \
--authrpc.jwtsecret="/etc/mainnet-eth/jwtsecret" \
--ws \
--ws.addr="0.0.0.0" \
--ws.origins="*" \
--ws.api="${CERC_GETH_WS_APIS:-eth,web3,net,admin,personal,debug,statediff}" \
--http.corsdomain="*" \
--gcmode full \
--txlookuplimit=0 \
--cache.preimages \
--syncmode=snap \
&
$MODE_FLAGS \
--datadir="${GETH_DATADIR}"\
--identity="${GETH_NODE_NAME}" \
--maxpeers=${GETH_MAX_PEERS} \
--cache=${GETH_CACHE} \
--cache.gc=${GETH_CACHE_GC} \
--cache.database=${GETH_CACHE_DB} \
--cache.trie=${GETH_CACHE_TRIE} \
--authrpc.addr='0.0.0.0' \
--authrpc.vhosts='*' \
--authrpc.jwtsecret="${GETH_JWTSECRET}" \
--http \
--http.addr='0.0.0.0' \
--http.api="${GETH_HTTP_API}" \
--http.vhosts='*' \
--metrics \
--metrics.addr='0.0.0.0' \
--ws \
--ws.addr='0.0.0.0' \
--ws.api="${GETH_WS_API}" \
--rpc.gascap=${GETH_RPC_GASCAP} \
--rpc.evmtimeout=${GETH_RPC_EVMTIMEOUT} \
--txlookuplimit=${GETH_TXLOOKUPLIMIT} \
--verbosity=${GETH_VERBOSITY} \
--log.vmodule="${GETH_VMODULE}" \
${GETH_OPTS} &
geth_pid=$!
wait $geth_pid
if [ "true" == "$CERC_KEEP_RUNNING_AFTER_GETH_EXIT" ]; then


@@ -1,22 +1,30 @@
#!/bin/bash
if [[ -n "$CERC_SCRIPT_DEBUG" ]]; then
if [[ "true" == "$CERC_SCRIPT_DEBUG" ]]; then
set -x
fi
DEBUG_LEVEL=${CERC_LIGHTHOUSE_DEBUG_LEVEL:-info}
ENR_OPTS=""
if [[ -n "$LIGHTHOUSE_ENR_ADDRESS" ]]; then
ENR_OPTS="--enr-address $LIGHTHOUSE_ENR_ADDRESS"
fi
data_dir=/var/lighthouse-data-dir
network_port=9001
http_port=8001
authrpc_port=8551
exec lighthouse \
bn \
--debug-level $DEBUG_LEVEL \
--datadir $data_dir \
--network mainnet \
--execution-endpoint $EXECUTION_ENDPOINT \
--execution-jwt /etc/mainnet-eth/jwtsecret \
exec lighthouse bn \
--checkpoint-sync-url "$LIGHTHOUSE_CHECKPOINT_SYNC_URL" \
--checkpoint-sync-url-timeout ${LIGHTHOUSE_CHECKPOINT_SYNC_URL_TIMEOUT} \
--datadir "$LIGHTHOUSE_DATADIR" \
--debug-level $LIGHTHOUSE_DEBUG_LEVEL \
--disable-deposit-contract-sync \
--checkpoint-sync-url https://beaconstate.ethstaker.cc
--disable-upnp \
--enr-tcp-port $LIGHTHOUSE_NETWORK_PORT \
--enr-udp-port $LIGHTHOUSE_NETWORK_PORT \
--execution-endpoint "$LIGHTHOUSE_EXECUTION_ENDPOINT" \
--execution-jwt /etc/mainnet-eth/jwtsecret \
--http \
--http-address 0.0.0.0 \
--http-port $LIGHTHOUSE_HTTP_PORT \
--metrics \
--metrics-address=0.0.0.0 \
--metrics-port $LIGHTHOUSE_METRICS_PORT \
--network mainnet \
--port $LIGHTHOUSE_NETWORK_PORT \
$ENR_OPTS $LIGHTHOUSE_OPTS
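Once the beacon node is running, the standard beacon API (served on LIGHTHOUSE_HTTP_PORT, 5052 by default above) can be queried directly, mirroring the compose healthcheck. A sketch against the deployed compose project:

LH_PORT=$(docker compose port mainnet-eth-lighthouse-1 5052 | cut -d: -f2)
curl -s "http://localhost:${LH_PORT}/eth/v1/node/syncing"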


@@ -0,0 +1,9 @@
#!/usr/bin/env bash
# Build cerc/keycloak-reg-api
source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
# See: https://stackoverflow.com/a/246128/1701505
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
docker build -t cerc/keycloak-reg-api:local ${build_command_args} ${CERC_REPO_BASE_DIR}/keycloak-reg-api


@@ -0,0 +1,9 @@
#!/usr/bin/env bash
# Build cerc/keycloak-reg-ui
source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
# See: https://stackoverflow.com/a/246128/1701505
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
docker build -t cerc/keycloak-reg-ui:local ${build_command_args} ${CERC_REPO_BASE_DIR}/keycloak-reg-ui


@@ -5,11 +5,19 @@ repos:
- github.com/cerc-io/go-ethereum
- github.com/cerc-io/lighthouse
- github.com/dboreham/foundry
- git.vdb.to/cerc-io/keycloak-reg-api
- git.vdb.to/cerc-io/keycloak-reg-ui
containers:
- cerc/go-ethereum
- cerc/lighthouse
- cerc/lighthouse-cli
- cerc/foundry
- cerc/keycloak
- cerc/webapp-base
- cerc/keycloak-reg-api
- cerc/keycloak-reg-ui
pods:
- mainnet-eth
- mainnet-eth-keycloak
- mainnet-eth-metrics
- foundry
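Assuming the usual stack orchestrator workflow applies to this stack (a sketch, not an exact transcript of the supported commands), the repositories, containers, and pods listed above would be fetched, built, and brought up along these lines:

laconic-so --stack mainnet-eth setup-repositories
laconic-so --stack mainnet-eth build-containers
laconic-so --stack mainnet-eth deploy up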