Merge CI test branch fixes #717

Merged
telackey merged 67 commits from ci-test into main on 2024-01-30 18:18:08 +00:00
522 changed files with 3966 additions and 846 deletions
Showing only changes of commit 1d78430679

View File

@ -2,7 +2,10 @@ name: Fixturenet-Eth-Plugeth-Test
on:
push:
branches: 'ci-test'
branches: '*'
paths:
- '!**'
- '.gitea/workflows/triggers/fixturenet-eth-plugeth-test'
# Needed until we can incorporate docker startup into the executor container
env:
@ -16,8 +19,16 @@ jobs:
steps:
- name: "Clone project repository"
uses: actions/checkout@v3
- name: "Install Python"
uses: cerc-io/setup-python@v4
# At present the stock setup-python action fails on Linux/aarch64
# Conditional steps below work around this by using deadsnakes for that case only
- name: "Install Python for ARM on Linux"
if: ${{ runner.arch == 'arm64' && runner.os == 'Linux' }}
uses: deadsnakes/action@v3.0.1
with:
python-version: '3.8'
- name: "Install Python cases other than ARM on Linux"
if: ${{ ! (runner.arch == 'arm64' && runner.os == 'Linux') }}
uses: actions/setup-python@v4
with:
python-version: '3.8'
- name: "Print Python version"

View File

@ -2,7 +2,10 @@ name: Fixturenet-Eth-Test
on:
push:
branches: 'ci-test'
branches: '*'
paths:
- '!**'
- '.gitea/workflows/triggers/fixturenet-eth-test'
# Needed until we can incorporate docker startup into the executor container
env:
@ -16,7 +19,15 @@ jobs:
steps:
- name: "Clone project repository"
uses: actions/checkout@v3
- name: "Install Python"
# At present the stock setup-python action fails on Linux/aarch64
# Conditional steps below work around this by using deadsnakes for that case only
- name: "Install Python for ARM on Linux"
if: ${{ runner.arch == 'arm64' && runner.os == 'Linux' }}
uses: deadsnakes/action@v3.0.1
with:
python-version: '3.8'
- name: "Install Python cases other than ARM on Linux"
if: ${{ ! (runner.arch == 'arm64' && runner.os == 'Linux') }}
uses: actions/setup-python@v4
with:
python-version: '3.8'

View File

@ -0,0 +1,55 @@
name: Fixturenet-Laconicd-Test
on:
push:
branches: '*'
paths:
- '!**'
- '.gitea/workflows/triggers/fixturenet-laconicd-test'
# Needed until we can incorporate docker startup into the executor container
env:
DOCKER_HOST: unix:///var/run/dind.sock
jobs:
test:
name: "Run an Laconicd fixturenet test"
runs-on: ubuntu-latest
steps:
- name: 'Update'
run: apt-get update
- name: 'Setup jq'
run: apt-get install jq -y
- name: 'Check jq'
run: |
which jq
jq --version
- name: "Clone project repository"
uses: actions/checkout@v3
# At present the stock setup-python action fails on Linux/aarch64
# Conditional steps below work around this by using deadsnakes for that case only
- name: "Install Python for ARM on Linux"
if: ${{ runner.arch == 'arm64' && runner.os == 'Linux' }}
uses: deadsnakes/action@v3.0.1
with:
python-version: '3.8'
- name: "Install Python cases other than ARM on Linux"
if: ${{ ! (runner.arch == 'arm64' && runner.os == 'Linux') }}
uses: actions/setup-python@v4
with:
python-version: '3.8'
- name: "Print Python version"
run: python3 --version
- name: "Install shiv"
run: pip install shiv
- name: "Generate build version file"
run: ./scripts/create_build_tag_file.sh
- name: "Build local shiv package"
run: ./scripts/build_shiv_package.sh
- name: Start dockerd # Also needed until we can incorporate into the executor
run: |
dockerd -H $DOCKER_HOST --userland-proxy=false &
sleep 5
- name: "Run fixturenet-laconicd tests"
run: ./tests/fixturenet-laconicd/run-test.sh

View File

@ -5,6 +5,8 @@ on:
branches:
- main
- publish-test
paths-ignore:
- '.gitea/workflows/triggers/*'
jobs:
publish:
@ -18,7 +20,15 @@ jobs:
run: |
build_tag=$(./scripts/create_build_tag_file.sh)
echo "build-tag=v${build_tag}" >> $GITHUB_OUTPUT
- name: "Install Python"
# At present the stock setup-python action fails on Linux/aarch64
# Conditional steps below work around this by using deadsnakes for that case only
- name: "Install Python for ARM on Linux"
if: ${{ runner.arch == 'arm64' && runner.os == 'Linux' }}
uses: deadsnakes/action@v3.0.1
with:
python-version: '3.8'
- name: "Install Python cases other than ARM on Linux"
if: ${{ ! (runner.arch == 'arm64' && runner.os == 'Linux') }}
uses: actions/setup-python@v4
with:
python-version: '3.8'

View File

@ -7,6 +7,8 @@ on:
branches:
- main
- ci-test
paths-ignore:
- '.gitea/workflows/triggers/*'
# Needed until we can incorporate docker startup into the executor container
env:
@ -19,7 +21,15 @@ jobs:
steps:
- name: "Clone project repository"
uses: actions/checkout@v3
- name: "Install Python"
# At present the stock setup-python action fails on Linux/aarch64
# Conditional steps below work around this by using deadsnakes for that case only
- name: "Install Python for ARM on Linux"
if: ${{ runner.arch == 'arm64' && runner.os == 'Linux' }}
uses: deadsnakes/action@v3.0.1
with:
python-version: '3.8'
- name: "Install Python cases other than ARM on Linux"
if: ${{ ! (runner.arch == 'arm64' && runner.os == 'Linux') }}
uses: actions/setup-python@v4
with:
python-version: '3.8'

View File

@ -0,0 +1,55 @@
name: K8s Deploy Test
on:
pull_request:
branches: '*'
push:
branches:
- main
- ci-test
paths-ignore:
- '.gitea/workflows/triggers/*'
# Needed until we can incorporate docker startup into the executor container
env:
DOCKER_HOST: unix:///var/run/dind.sock
jobs:
test:
name: "Run deploy test suite"
runs-on: ubuntu-latest
steps:
- name: "Clone project repository"
uses: actions/checkout@v3
# At present the stock setup-python action fails on Linux/aarch64
# Conditional steps below work around this by using deadsnakes for that case only
- name: "Install Python for ARM on Linux"
if: ${{ runner.arch == 'arm64' && runner.os == 'Linux' }}
uses: deadsnakes/action@v3.0.1
with:
python-version: '3.8'
- name: "Install Python cases other than ARM on Linux"
if: ${{ ! (runner.arch == 'arm64' && runner.os == 'Linux') }}
uses: actions/setup-python@v4
with:
python-version: '3.8'
- name: "Print Python version"
run: python3 --version
- name: "Install shiv"
run: pip install shiv
- name: "Generate build version file"
run: ./scripts/create_build_tag_file.sh
- name: "Build local shiv package"
run: ./scripts/build_shiv_package.sh
- name: Start dockerd # Also needed until we can incorporate into the executor
run: |
dockerd -H $DOCKER_HOST --userland-proxy=false &
sleep 5
- name: "Install Go"
uses: actions/setup-go@v4
with:
go-version: '1.21'
- name: "Install Kind"
run: go install sigs.k8s.io/kind@v0.20.0
- name: "Debug Kind"
run: kind create cluster --retain && docker logs kind-control-plane

View File

@ -0,0 +1,49 @@
name: Webapp Test
on:
pull_request:
branches: '*'
push:
branches:
- main
- ci-test
paths-ignore:
- '.gitea/workflows/triggers/*'
# Needed until we can incorporate docker startup into the executor container
env:
DOCKER_HOST: unix:///var/run/dind.sock
jobs:
test:
name: "Run webapp test suite"
runs-on: ubuntu-latest
steps:
- name: "Clone project repository"
uses: actions/checkout@v3
# At present the stock setup-python action fails on Linux/aarch64
# Conditional steps below work around this by using deadsnakes for that case only
- name: "Install Python for ARM on Linux"
if: ${{ runner.arch == 'arm64' && runner.os == 'Linux' }}
uses: deadsnakes/action@v3.0.1
with:
python-version: '3.8'
- name: "Install Python cases other than ARM on Linux"
if: ${{ ! (runner.arch == 'arm64' && runner.os == 'Linux') }}
uses: actions/setup-python@v4
with:
python-version: '3.8'
- name: "Print Python version"
run: python3 --version
- name: "Install shiv"
run: pip install shiv
- name: "Generate build version file"
run: ./scripts/create_build_tag_file.sh
- name: "Build local shiv package"
run: ./scripts/build_shiv_package.sh
- name: Start dockerd # Also needed until we can incorporate into the executor
run: |
dockerd -H $DOCKER_HOST --userland-proxy=false &
sleep 5
- name: "Run webapp tests"
run: ./tests/webapp-test/run-webapp-test.sh

View File

@ -7,6 +7,8 @@ on:
branches:
- main
- ci-test
paths-ignore:
- '.gitea/workflows/triggers/*'
# Needed until we can incorporate docker startup into the executor container
env:
@ -19,7 +21,15 @@ jobs:
steps:
- name: "Clone project repository"
uses: actions/checkout@v3
- name: "Install Python"
# At present the stock setup-python action fails on Linux/aarch64
# Conditional steps below work around this by using deadsnakes for that case only
- name: "Install Python for ARM on Linux"
if: ${{ runner.arch == 'arm64' && runner.os == 'Linux' }}
uses: deadsnakes/action@v3.0.1
with:
python-version: '3.8'
- name: "Install Python cases other than ARM on Linux"
if: ${{ ! (runner.arch == 'arm64' && runner.os == 'Linux') }}
uses: actions/setup-python@v4
with:
python-version: '3.8'
@ -38,3 +48,4 @@ jobs:
- name: "Run smoke tests"
run: ./tests/smoke-test/run-smoke-test.sh

View File

@ -0,0 +1,2 @@
Change this file to trigger running the fixturenet-eth-plugeth-test CI job
trigger

View File

@ -0,0 +1,2 @@
Change this file to trigger running the fixturenet-eth-test CI job

View File

@ -0,0 +1,2 @@
Change this file to trigger running the fixturenet-laconicd-test CI job
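These trigger files pair with the `paths:` filters above: `- '!**'` excludes every path and the trigger file path is then re-included, so each workflow only runs when its own trigger file changes. A minimal sketch of kicking off a run by bumping the trigger file (the appended text and commit message are illustrative):

```bash
# Append a line to the trigger file and push it to start the CI job
echo "trigger $(date -u +%Y%m%d%H%M)" >> .gitea/workflows/triggers/fixturenet-laconicd-test
git add .gitea/workflows/triggers/fixturenet-laconicd-test
git commit -m "Trigger fixturenet-laconicd-test CI job"
git push
```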

.github/workflows/fixturenet-eth.yml
View File

@ -0,0 +1,30 @@
name: Fixturenet-Eth Test
on:
push:
branches: '*'
paths:
- '!**'
- '.github/workflows/triggers/fixturenet-eth-test'
jobs:
test:
name: "Run fixturenet-eth test suite"
runs-on: ubuntu-latest
steps:
- name: "Clone project repository"
uses: actions/checkout@v3
- name: "Install Python"
uses: actions/setup-python@v4
with:
python-version: '3.8'
- name: "Print Python version"
run: python3 --version
- name: "Install shiv"
run: pip install shiv
- name: "Generate build version file"
run: ./scripts/create_build_tag_file.sh
- name: "Build local shiv package"
run: ./scripts/build_shiv_package.sh
- name: "Run fixturenet-eth tests"
run: ./tests/fixturenet-eth/run-test.sh

View File

@ -0,0 +1,30 @@
name: Fixturenet-Laconicd Test
on:
push:
branches: '*'
paths:
- '!**'
- '.github/workflows/triggers/fixturenet-laconicd-test'
jobs:
test:
name: "Run fixturenet-laconicd test suite"
runs-on: ubuntu-latest
steps:
- name: "Clone project repository"
uses: actions/checkout@v3
- name: "Install Python"
uses: actions/setup-python@v4
with:
python-version: '3.8'
- name: "Print Python version"
run: python3 --version
- name: "Install shiv"
run: pip install shiv
- name: "Generate build version file"
run: ./scripts/create_build_tag_file.sh
- name: "Build local shiv package"
run: ./scripts/build_shiv_package.sh
- name: "Run fixturenet-laconicd tests"
run: ./tests/fixturenet-laconicd/run-test.sh

.github/workflows/test-webapp.yml
View File

@ -0,0 +1,29 @@
name: Webapp Test
on:
pull_request:
branches: '*'
push:
branches: '*'
jobs:
test:
name: "Run webapp test suite"
runs-on: ubuntu-latest
steps:
- name: "Clone project repository"
uses: actions/checkout@v3
- name: "Install Python"
uses: actions/setup-python@v4
with:
python-version: '3.8'
- name: "Print Python version"
run: python3 --version
- name: "Install shiv"
run: pip install shiv
- name: "Generate build version file"
run: ./scripts/create_build_tag_file.sh
- name: "Build local shiv package"
run: ./scripts/build_shiv_package.sh
- name: "Run webapp tests"
run: ./tests/webapp-test/run-webapp-test.sh

View File

@ -0,0 +1,2 @@
Change this file to trigger running the fixturenet-eth-test CI job

View File

@ -0,0 +1,3 @@
Change this file to trigger running the fixturenet-laconicd-test CI job
trigger

.gitignore
View File

@ -6,5 +6,5 @@ laconic_stack_orchestrator.egg-info
__pycache__
*~
package
app/data/build_tag.txt
build
stack_orchestrator/data/build_tag.txt
/build

View File

@ -52,7 +52,7 @@ Version: 1.1.0-7a607c2-202304260513
Save the distribution url to `~/.laconic-so/config.yml`:
```bash
mkdir ~/.laconic-so
echo "distribution-url: https://github.com/cerc-io/stack-orchestrator/releases/latest/download/laconic-so" > ~/.laconic-so/config.yml"
echo "distribution-url: https://github.com/cerc-io/stack-orchestrator/releases/latest/download/laconic-so" > ~/.laconic-so/config.yml
```
### Update
@ -64,12 +64,12 @@ laconic-so update
## Usage
The various [stacks](/app/data/stacks) each contain instructions for running different stacks based on your use case. For example:
The various [stacks](/stack_orchestrator/data/stacks) each contain instructions for running different stacks based on your use case. For example:
- [self-hosted Gitea](/app/data/stacks/build-support)
- [an Optimism Fixturenet](/app/data/stacks/fixturenet-optimism)
- [laconicd with console and CLI](app/data/stacks/fixturenet-laconic-loaded)
- [kubo (IPFS)](app/data/stacks/kubo)
- [self-hosted Gitea](/stack_orchestrator/data/stacks/build-support)
- [an Optimism Fixturenet](/stack_orchestrator/data/stacks/fixturenet-optimism)
- [laconicd with console and CLI](stack_orchestrator/data/stacks/fixturenet-laconic-loaded)
- [kubo (IPFS)](stack_orchestrator/data/stacks/kubo)
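Each of these stack directories documents the same basic flow. A minimal sketch of it, assuming the `fixturenet-laconic-loaded` stack and an illustrative cluster name (the exact flags are spelled out in each stack's own README):

```bash
# Clone the stack's repositories, build its images, then bring it up
laconic-so --stack fixturenet-laconic-loaded setup-repositories --pull
laconic-so --stack fixturenet-laconic-loaded build-containers
laconic-so --stack fixturenet-laconic-loaded deploy --cluster laconic up
```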
## Contributing

View File

@ -1,27 +0,0 @@
version: '3.7'
services:
nitro-reverse-payment-proxy:
image: cerc/go-nitro:local
hostname: nitro-reverse-payment-proxy
restart: on-failure
depends_on:
# Wait for the go-nitro node to start
go-nitro:
condition: service_healthy
environment:
CERC_PROXY_ADDRESS: 0.0.0.0:8081
CERC_PROXY_NITRO_ENDPOINT: ${CERC_PROXY_NITRO_ENDPOINT:-go-nitro:4005/api/v1}
CERC_PROXY_DESTINATION_URL: ${CERC_PROXY_DESTINATION_URL:-http://ipld-eth-server:8081}
CERC_PROXY_COST_PER_BYTE: ${CERC_PROXY_COST_PER_BYTE:-1}
entrypoint: ["bash", "-c", "/app/run-reverse-payment-proxy.sh"]
volumes:
- ../config/go-nitro/run-reverse-payment-proxy.sh:/app/run-reverse-payment-proxy.sh
healthcheck:
test: ["CMD", "nc", "-vz", "localhost", "8081"]
interval: 10s
timeout: 5s
retries: 10
start_period: 10s
ports:
- "8081:8081"

View File

@ -1,63 +0,0 @@
version: '3.7'
services:
ponder-app-indexer:
hostname: ponder-app-indexer
restart: unless-stopped
image: cerc/ponder:local
working_dir: /app/examples/token-erc20
environment:
CERC_PONDER_CHAIN_ID: ${PONDER_CHAIN_ID:-99}
CERC_PONDER_RPC_URL_1: ${PONDER_RPC_URL_1:-http://nitro-reverse-payment-proxy:8081}
CERC_PONDER_NITRO_PK: ${CERC_PONDER_INDEXER_NITRO_PK:-58368d20ff12f17669c06158c21d885897aa56f9be430edc789614bf9851d53f}
CERC_PONDER_NITRO_CHAIN_PK: ${CERC_PONDER_INDEXER_NITRO_CHAIN_PK:-fb1e9af328c283ca3e2486e7c24d13582b7912057d8b9542ff41503c85bc05c0}
CERC_PONDER_NITRO_CHAIN_URL: ${CERC_PONDER_NITRO_CHAIN_URL:-http://fixturenet-eth-geth-1:8546}
CERC_RELAY_MULTIADDR: ${CERC_RELAY_MULTIADDR}
CERC_UPSTREAM_NITRO_ADDRESS: ${CERC_UPSTREAM_NITRO_ADDRESS:-0xAAA6628Ec44A8a742987EF3A114dDFE2D4F7aDCE}
CERC_UPSTREAM_NITRO_MULTIADDR: ${CERC_UPSTREAM_NITRO_MULTIADDR:-/dns4/go-nitro/tcp/5005/ws/p2p/16Uiu2HAmSjXJqsyBJgcBUU2HQmykxGseafSatbpq5471XmuaUqyv}
CERC_UPSTREAM_NITRO_PAY_AMOUNT: ${CERC_UPSTREAM_NITRO_PAY_AMOUNT:-5000}
command: ["bash", "./ponder-start.sh"]
volumes:
- ../config/ponder/ponder-start.sh:/app/examples/token-erc20/ponder-start.sh
- ../config/ponder/ponder.indexer.config.ts:/app/examples/token-erc20/ponder.config.ts
- peers_ids:/peers
- nitro_deployment:/nitro
- ponder_indexer_nitro_data:/app/examples/token-erc20/.ponder/nitro-db
ports:
- "42070"
extra_hosts:
- "host.docker.internal:host-gateway"
ponder-app-watcher:
hostname: ponder-app-watcher
depends_on:
- ponder-app-indexer
restart: unless-stopped
image: cerc/ponder:local
working_dir: /app/examples/token-erc20
environment:
CERC_PONDER_CHAIN_ID: ${PONDER_CHAIN_ID:-99}
CERC_PONDER_NITRO_PK: ${CERC_PONDER_WATCHER_NITRO_PK:-febb3b74b0b52d0976f6571d555f4ac8b91c308dfa25c7b58d1e6a7c3f50c781}
CERC_PONDER_NITRO_CHAIN_PK: ${CERC_PONDER_WATCHER_NITRO_CHAIN_PK:-be4aa664815ea3bc3d63118649a733f6c96b243744310806ecb6d96359ab62cf}
CERC_PONDER_NITRO_CHAIN_URL: ${CERC_PONDER_NITRO_CHAIN_URL:-http://fixturenet-eth-geth-1:8546}
CERC_RELAY_MULTIADDR: ${CERC_RELAY_MULTIADDR}
CERC_INDEXER_GQL_ENDPOINT: ${CERC_INDEXER_GQL_ENDPOINT:-http://ponder-app-indexer:42070/graphql}
CERC_INDEXER_NITRO_ADDRESS: ${CERC_INDEXER_NITRO_ADDRESS:-0x67D5b55604d1aF90074FcB69b8C51838FFF84f8d}
CERC_INDEXER_NITRO_PAY_AMOUNT: ${CERC_INDEXER_NITRO_PAY_AMOUNT:-50}
command: ["bash", "./ponder-start.sh"]
volumes:
- ../config/ponder/ponder-start.sh:/app/examples/token-erc20/ponder-start.sh
- ../config/ponder/ponder.watcher.config.ts:/app/examples/token-erc20/ponder.config.ts
- peers_ids:/peers
- nitro_deployment:/nitro
- ponder_watcher_nitro_data:/app/examples/token-erc20/.ponder/nitro-db
ports:
- "42069"
extra_hosts:
- "host.docker.internal:host-gateway"
volumes:
peers_ids:
nitro_deployment:
ponder_indexer_nitro_data:
ponder_watcher_nitro_data:

View File

@ -1,14 +0,0 @@
#!/bin/bash
set -e
if [ -n "$CERC_SCRIPT_DEBUG" ]; then
set -x
fi
echo "Running Nitro reverse payment proxy"
echo "Using CERC_PROXY_ADDRESS ${CERC_PROXY_ADDRESS}"
echo "Using CERC_PROXY_NITRO_ENDPOINT ${CERC_PROXY_NITRO_ENDPOINT}"
echo "Using CERC_PROXY_DESTINATION_URL ${CERC_PROXY_DESTINATION_URL}"
echo "Using CERC_PROXY_COST_PER_BYTE ${CERC_PROXY_COST_PER_BYTE}"
./proxy -proxyaddress ${CERC_PROXY_ADDRESS} -nitroendpoint=${CERC_PROXY_NITRO_ENDPOINT} -destinationurl=${CERC_PROXY_DESTINATION_URL} -costperbyte ${CERC_PROXY_COST_PER_BYTE} -enablepaidrpcmethods

View File

@ -1,6 +0,0 @@
#!/usr/bin/env bash
# Build cerc/go-nitro
source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
docker build -t cerc/go-nitro:local -f ${CERC_REPO_BASE_DIR}/go-nitro/docker/local/Dockerfile ${build_command_args} ${CERC_REPO_BASE_DIR}/go-nitro

View File

@ -1,29 +0,0 @@
#!/usr/bin/env bash
# Create some demo/test records in the registry
set -e
if [ -n "$CERC_SCRIPT_DEBUG" ]; then
set -x
fi
registry_command="laconic cns"
record_1_filename=demo-record-1.yml
cat <<EOF > ${record_1_filename}
record:
type: WebsiteRegistrationRecord
url: 'https://cerc.io'
repo_registration_record_cid: QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D
build_artifact_cid: QmP8jTG1m9GSDJLCbeWhVSVgEzCPPwXRdCRuJtQ5Tz9Kc9
tls_cert_cid: QmbWqxBEKC3P8tqsKc98xmWNzrzDtRLMiMPL8wBuTGsMnR
version: 1.0.23
EOF
# Check we have funds
funds_response=$(${registry_command} account get --address $(cat my-address.txt))
funds_balance=$(echo ${funds_response} | jq -r .[0].balance[0].quantity)
echo "Balance is: ${funds_balance}"
# Create a bond
bond_create_result=$(${registry_command} bond create --type aphoton --quantity 1000000000)
bond_id=$(echo ${bond_create_result} | jq -r .bondId)
echo "Created bond with id: ${bond_id}"
# Publish a demo record
publish_response=$(${registry_command} record publish --filename ${record_1_filename} --bond-id ${bond_id})
published_record_id=$(echo ${publish_response} | jq -r .id)
echo "Published ${record_1_filename} with id: ${published_record_id}"

View File

@ -1,13 +0,0 @@
FROM node:16.17.1-alpine3.16
RUN apk --update --no-cache add git python3 alpine-sdk
WORKDIR /app
COPY . .
RUN echo "Building watcher-ts" && \
git checkout v0.2.19 && \
yarn && yarn build
WORKDIR /app/packages/erc20-watcher

View File

@ -1,57 +0,0 @@
# fixturenet-payments
Instructions to setup and deploy an end-to-end fixturenet-payments stack
## Setup
Clone required repositories:
```bash
laconic-so --stack fixturenet-payments setup-repositories --pull
```
Build the container images:
```bash
laconic-so --stack fixturenet-payments build-containers
```
## Deploy
Deploy the stack:
```bash
laconic-so --stack fixturenet-payments deploy --cluster payments up
# Exposed on host ports:
# 4005: go-nitro node's RPC endpoint
# 5005: go-nitro node's p2p endpoint
# 8081: reverse payment proxy's RPC endpoint
# 15432: MobyMask v3 watcher's db endpoint
# 3001: MobyMask v3 watcher endpoint
# 9090: MobyMask v3 watcher relay node endpoint
# 8080: MobyMask snap
# 3004: MobyMask v3 app
```
## Demo
Follow the [demo](./demo.md) to try out end-to-end payments
## Clean up
Stop all the services running in background:
```bash
laconic-so --stack fixturenet-payments deploy --cluster payments down 30
```
Clear volumes created by this stack:
```bash
# List all relevant volumes
docker volume ls -q --filter "name=[payments"
# Remove all the listed volumes
docker volume rm $(docker volume ls -q --filter "name=[payments")
```

View File

@ -1,310 +0,0 @@
# Demo
Stack components:
* `ipld-eth-db` database for statediffed data
* Local geth + lighthouse blockchain "fixturenet" running in statediffing mode
* `ipld-eth-server` which runs an ETH RPC API and a GQL server; serves data from `ipld-eth-db`
* A go-nitro deployment acting as the Nitro node for `ipld-eth-server`
* A modified reverse payment proxy server (based on the one from go-nitro) that proxies requests to `ipld-eth-server`'s RPC endpoint; it talks to `ipld-eth-server`'s Nitro node to accept and validate payments required for configured RPC requests
* A MobyMask v3 watcher that pays the `ipld-eth-server` for ETH RPC requests
* A MobyMask v3 app that pays the watcher for reads (GQL queries) and writes
* An example ERC20 Ponder app that pays the `ipld-eth-server` for ETH RPC requests
## Setup
* On starting the stack, MobyMask watcher creates a payment channel with the `ipld-eth-server`'s Nitro node. Check watcher logs and wait for the same:
```bash
docker logs -f $(docker ps -aq --filter name="mobymask-watcher-server")
# Expected output:
# vulcanize:server Peer ID: 12D3KooWKLqLWU82VU7jmsmQMruRvZWhoBoVsf1UHchM5Nuq9ymY
# vulcanize:server Using chain URL http://fixturenet-eth-geth-1:8546 for Nitro node
# ...
# ts-nitro:util:nitro Ledger channel created with id 0x65703ccdfacab09ac35367bdbe6c5a337e7a6651aad526807607b1c59b28bc1e
# ...
# ts-nitro:util:nitro Virtual payment channel created with id 0x29ff1335d73391a50e8fde3e9b34f00c3d81c39ddc7f89187f44dd51df96140e
# vulcanize:server Starting server... +0ms
```
* Keep the above command running to keep track of incoming payments and GQL requests from the MobyMask app
* In another terminal, export the payment channel id to a variable:
```bash
export WATCHER_UPSTREAM_PAYMENT_CHANNEL=<PAYMENT_CHANNEL_ID>
```
* Check the payment channel status:
```bash
docker exec payments-nitro-rpc-client-1 npm exec -c "nitro-rpc-client get-payment-channel $WATCHER_UPSTREAM_PAYMENT_CHANNEL -h go-nitro -p 4005"
# Expected output:
# {
# ID: '0x8c0d17639bd2ba07dbcd248304a8f3c6c7276bfe25c2b87fe41f461e20f33f01',
# Status: 'Open',
# Balance: {
# AssetAddress: '0x0000000000000000000000000000000000000000',
# Payee: '0xaaa6628ec44a8a742987ef3a114ddfe2d4f7adce',
# Payer: '0xbbb676f9cff8d242e9eac39d063848807d3d1d94',
# PaidSoFar: 0n,
# RemainingFunds: 1000000000n
# }
# }
```
* In another terminal, check the reverse payment proxy server's logs to keep track of incoming payments and RPC requests:
```bash
docker logs -f $(docker ps -aq --filter name="nitro-reverse-payment-proxy")
```
* MetaMask flask wallet setup for running the MobyMask app:
* Get the geth nodes port mapped to host:
```bash
docker port payments-fixturenet-eth-geth-1-1 8545
```
* In MetaMask, add a custom network with the following settings:
```bash
# Network name
Local fixturenet
# New RPC URL
http://127.0.0.1:<GETH_PORT>
# Chain ID
1212
# Currency symbol
ETH
```
* Import a faucet account with the following private key:
```bash
# Faucet PK
# 0x570b909da9669b2f35a0b1ac70b8358516d55ae1b5b3710e95e9a94395090597
```
* Create an additional account for usage in the app; fund it from the faucet account
* Get the generated root invite link for MobyMask from contract deployment container logs:
```bash
docker logs -f $(docker ps -aq --filter name="mobymask-1")
# Expected output:
# ...
# "key": "0x60e706fda4639fe0a8eb102cb0ce81231cf6e819f41cb4eadf72d865ea4c11ad"
# }
# http://127.0.0.1:3004/#/members?invitation=<INVITATION>
```
## Run
### MobyMask App
* Open app in a browser (where MetaMask was setup) using the invite link
* Run the following in browser console to enable logs:
```bash
localStorage.debug = 'ts-nitro:*'
# Refresh the tab for taking effect
```
* In the apps debug panel, check that the peer gets connected to relay node and watcher peer
* Open the `NITRO` tab in debug panel
* Click on `Connect Wallet` to connect to MetaMask (make sure that the newly funded account is active)
* Click on `Connect Snap` to install/connect snap
* Perform `DIRECT FUND` with the preset amount and wait for the MetaMask confirmation prompt to appear; confirm the transaction and wait for a ledger channel to be created with the watcher
* Perform `VIRTUAL FUND` with amount set to `10000` and wait for a payment channel to be created with the watcher
* Perform phisher status check queries now that a payment channel is created:
* Check the watcher logs for received payments along with the GQL queries:
```bash
# Expected output:
# ...
# laconic:payments Serving a paid query for 0x86804299822212c070178B5135Ba6DdAcFC357D3
# vulcanize:resolver isPhisher 0x98ae4f9e9d01cc892adfe6871e1db0287039e0c183d3b5bb31d724228c114744 0x2B6AFbd4F479cE4101Df722cF4E05F941523EaD9 TWT:ash1
# vulcanize:indexer isPhisher: db miss, fetching from upstream server
# laconic:payments Making RPC call: eth_chainId
# laconic:payments Making RPC call: eth_getBlockByHash
# laconic:payments Making RPC call: eth_chainId
# laconic:payments Making RPC call: eth_getStorageAt
```
* The watcher makes several ETH RPC requests to `ipld-eth-server` to fetch data required for satisfying the GQL request(s); check the payment proxy server logs for charged RPC requests (`eth_getBlockByHash`, `eth_getBlockByNumber`, `eth_getStorageAt`):
```bash
# Expected output:
# ...
# {"time":"2023-10-06T06:46:52.769009314Z","level":"DEBUG","msg":"Serving RPC request","method":"eth_chainId"}
# {"time":"2023-10-06T06:46:52.773006426Z","level":"DEBUG","msg":"Serving RPC request","method":"eth_getBlockByNumber"}
# {"time":"2023-10-06T06:46:52.811142054Z","level":"DEBUG","msg":"Request cost","cost-per-byte":1,"response-length":1480,"cost":1480,"method":"eth_getBlockByNumber"}
# {"time":"2023-10-06T06:46:52.811418494Z","level":"DEBUG","msg":"sent message","address":"0xAAA6628Ec44A8a742987EF3A114dDFE2D4F7aDCE","method":"receive_voucher"}
# {"time":"2023-10-06T06:46:52.812557482Z","level":"DEBUG","msg":"Received voucher","delta":5000}
# ...
# {"time":"2023-10-06T06:46:52.87525215Z","level":"DEBUG","msg":"Serving RPC request","method":"eth_getStorageAt"}
# {"time":"2023-10-06T06:46:52.882859654Z","level":"DEBUG","msg":"Request cost","cost-per-byte":1,"response-length":104,"cost":104,"method":"eth_getStorageAt"}
# {"time":"2023-10-06T06:46:52.882946485Z","level":"DEBUG","msg":"sent message","address":"0xAAA6628Ec44A8a742987EF3A114dDFE2D4F7aDCE","method":"receive_voucher"}
# {"time":"2023-10-06T06:46:52.884012641Z","level":"DEBUG","msg":"Received voucher","delta":5000}
# {"time":"2023-10-06T06:46:52.884032961Z","level":"DEBUG","msg":"Destination request","url":"http://ipld-eth-server:8081/"}
```
* Change the amount besides `PAY` button in debug panel to `>=100` for phisher reports next
* Perform a phisher report and check the watcher logs for received payments:
```bash
# Expected output:
# ...
# vulcanize:libp2p-utils [6:50:2] Received a message on mobymask P2P network from peer: 12D3KooWRkxV9SX8uTUZYkbRjai4Fsn7yavB61J5TMnksixsabsP
# ts-nitro:engine {"msg":"Received message","_msg":{"to":"0xBBB676","from":"0x868042","payloadSummaries":[],"proposalSummaries":[],"payments":[{"amount":200,"channelId":"0x557153d729cf3323c0bdb40a36b245f98c2d4562933ba2182c9d61c5cfeda948"}],"rejectedObjectives":[]}}
# laconic:payments Received a payment voucher of 100 from 0x86804299822212c070178B5135Ba6DdAcFC357D3
# vulcanize:libp2p-utils Payment received for a mutation request from 0x86804299822212c070178B5135Ba6DdAcFC357D3
# vulcanize:libp2p-utils Transaction receipt for invoke message {
# to: '0x2B6AFbd4F479cE4101Df722cF4E05F941523EaD9',
# blockNumber: 232,
# blockHash: '0x6a188722c102662ea48af3786fe9db0d4b6c7ab7b27473eb0e628cf95746a244',
# transactionHash: '0x6521205db8a905b3222adc2b6855f9b2abc72580624d299bec2a35bcba173efa',
# effectiveGasPrice: '1500000007',
# gasUsed: '113355'
# }
```
* Check the watcher - ipld-eth-server payment channel status after a few requests:
```bash
docker exec payments-nitro-rpc-client-1 npm exec -c "nitro-rpc-client get-payment-channel $WATCHER_UPSTREAM_PAYMENT_CHANNEL -h go-nitro -p 4005"
# Expected output ('PaidSoFar' should be non zero):
# {
# ID: '0x8c0d17639bd2ba07dbcd248304a8f3c6c7276bfe25c2b87fe41f461e20f33f01',
# Status: 'Open',
# Balance: {
# AssetAddress: '0x0000000000000000000000000000000000000000',
# Payee: '0xaaa6628ec44a8a742987ef3a114ddfe2d4f7adce',
# Payer: '0xbbb676f9cff8d242e9eac39d063848807d3d1d94',
# PaidSoFar: 30000n,
# RemainingFunds: 999970000n
# }
# }
```
### ERC20 Ponder App
* Run the ponder app in indexer mode:
```bash
docker exec -it payments-ponder-app-indexer-1 bash -c "DEBUG=laconic:payments pnpm start"
# Expected output:
# 08:00:28.701 INFO payment Nitro node setup with address 0x67D5b55604d1aF90074FcB69b8C51838FFF84f8d
# laconic:payments Starting voucher subscription... +0ms
# ...
# 09:58:54.288 INFO payment Creating ledger channel with nitro node 0xAAA6628Ec44A8a742987EF3A114dDFE2D4F7aDCE
# ...
# 09:59:14.230 INFO payment Creating payment channel with nitro node 0xAAA6628Ec44A8a742987EF3A114dDFE2D4F7aDCE
# ...
# 09:59:14.329 INFO payment Using payment channel 0x10f049519bc3f862e2b26e974be8666886228f30ea54aab06e2f23718afffab0
```
* Export the payment channel id to a variable:
```bash
export PONDER_UPSTREAM_PAYMENT_CHANNEL=<PAYMENT_CHANNEL_ID>
```
* On starting the Ponder app in indexer mode, it creates a payment channel with the `ipld-eth-server`'s Nitro node and then starts the historical sync service
* The sync service makes several ETH RPC requests to the `ipld-eth-server` to fetch required data; check the payment proxy server logs for charged RPC requests (`eth_getBlockByNumber`, `eth_getLogs`)
```bash
# Expected output:
# ...
# {"time":"2023-10-06T06:51:45.214478402Z","level":"DEBUG","msg":"Serving RPC request","method":"eth_getBlockByNumber"}
# {"time":"2023-10-06T06:51:45.22251171Z","level":"DEBUG","msg":"Request cost","cost-per-byte":1,"response-length":576,"cost":576,"method":"eth_getBlockByNumber"}
# {"time":"2023-10-06T06:51:45.222641963Z","level":"DEBUG","msg":"sent message","address":"0xAAA6628Ec44A8a742987EF3A114dDFE2D4F7aDCE","method":"receive_voucher"}
# {"time":"2023-10-06T06:51:45.224042391Z","level":"DEBUG","msg":"Received voucher","delta":5000}
# {"time":"2023-10-06T06:51:45.224061411Z","level":"DEBUG","msg":"Destination request","url":"http://ipld-eth-server:8081/"}
# {"time":"2023-10-06T06:51:45.242064953Z","level":"DEBUG","msg":"Serving RPC request","method":"eth_getLogs"}
# {"time":"2023-10-06T06:51:45.249118517Z","level":"DEBUG","msg":"Request cost","cost-per-byte":1,"response-length":61,"cost":61,"method":"eth_getLogs"}
# {"time":"2023-10-06T06:51:45.249189892Z","level":"DEBUG","msg":"sent message","address":"0xAAA6628Ec44A8a742987EF3A114dDFE2D4F7aDCE","method":"receive_voucher"}
# {"time":"2023-10-06T06:51:45.249743149Z","level":"DEBUG","msg":"Received voucher","delta":5000}
# {"time":"2023-10-06T06:51:45.249760631Z","level":"DEBUG","msg":"Destination request","url":"http://ipld-eth-server:8081/"}
# ...
```
* Check the ponder - ipld-eth-server payment channel status:
```bash
docker exec payments-nitro-rpc-client-1 npm exec -c "nitro-rpc-client get-payment-channel $PONDER_UPSTREAM_PAYMENT_CHANNEL -h go-nitro -p 4005"
# Expected output ('PaidSoFar' is non zero):
# {
# ID: '0x1178ac0f2a43e54a122216fa6afdd30333b590e49e50317a1f9274a591da0f96',
# Status: 'Open',
# Balance: {
# AssetAddress: '0x0000000000000000000000000000000000000000',
# Payee: '0xaaa6628ec44a8a742987ef3a114ddfe2d4f7adce',
# Payer: '0x67d5b55604d1af90074fcb69b8c51838fff84f8d',
# PaidSoFar: 215000n,
# RemainingFunds: 999785000n
# }
# }
```
* In another terminal run the ponder app in watcher mode:
```bash
docker exec -it payments-ponder-app-watcher-1 bash -c "DEBUG=laconic:payments pnpm start"
# Expected output:
# 11:23:22.057 DEBUG app Started using config file: ponder.config.ts
# 08:02:12.548 INFO payment Nitro node setup with address 0x111A00868581f73AB42FEEF67D235Ca09ca1E8db
# laconic:payments Starting voucher subscription... +0ms
# 08:02:17.417 INFO payment Creating ledger channel with nitro node 0x67D5b55604d1aF90074FcB69b8C51838FFF84f8d ...
# 08:02:37.135 INFO payment Creating payment channel with nitro node 0x67D5b55604d1aF90074FcB69b8C51838FFF84f8d ...
# 08:02:37.313 INFO payment Using payment channel 0x4b8e67f6a6fcfe114fdd60b85f963344ece4c77d4eea3825688c74b45ff5509b
# ...
# 11:23:22.436 INFO server Started responding as healthy
```
* Check the terminal in which indexer mode ponder is running. Logs of payment for `eth_getLogs` queries can be seen:
```bash
# ...
# 08:02:37.763 DEBUG realtime Finished processing new head block 89 (network=fixturenet)
# laconic:payments Received a payment voucher of 50 from 0x111A00868581f73AB42FEEF67D235Ca09ca1E8db +444ms
# laconic:payments Serving a paid query for 0x111A00868581f73AB42FEEF67D235Ca09ca1E8db +1ms
# 08:02:37.804 DEBUG payment Verified payment for GQL queries getLogEvents
# laconic:payments Received a payment voucher of 50 from 0x111A00868581f73AB42FEEF67D235Ca09ca1E8db +45ms
# laconic:payments Serving a paid query for 0x111A00868581f73AB42FEEF67D235Ca09ca1E8db +0ms
# 08:02:37.849 DEBUG payment Verified payment for GQL queries getLogEvents
```
## Clean Up
* In the MobyMask app, perform `VIRTUAL DEFUND` and `DIRECT DEFUND` (in order) for closing the payment channel created with watcher
* Run the following in the browser console to delete the Nitro node's data:
```bash
await clearNodeStorage()
```
* Run the following in the browser console to clear data in local storage:
```bash
localStorage.clear()
```
* On a fresh restart, clear activity tab data in MetaMask for concerned accounts

View File

@ -8,7 +8,7 @@ Core to the feature completeness of stack orchestrator is to [decouple the tool
## Example
- in `app/data/stacks/my-new-stack/stack.yml` add:
- in `stack_orchestrator/data/stacks/my-new-stack/stack.yml` add:
```yaml
version: "0.1"
@ -21,7 +21,7 @@ pods:
- my-new-stack
```
- in `app/data/container-build/cerc-my-new-stack/build.sh` add:
- in `stack_orchestrator/data/container-build/cerc-my-new-stack/build.sh` add:
```yaml
#!/usr/bin/env bash
@ -30,7 +30,7 @@ source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
docker build -t cerc/my-new-stack:local -f ${CERC_REPO_BASE_DIR}/my-new-stack/Dockerfile ${build_command_args} ${CERC_REPO_BASE_DIR}/my-new-stack
```
- in `app/data/compose/docker-compose-my-new-stack.yml` add:
- in `stack_orchestrator/data/compose/docker-compose-my-new-stack.yml` add:
```yaml
version: "3.2"
@ -43,20 +43,20 @@ services:
- "0.0.0.0:3000:3000"
```
- in `app/data/repository-list.txt` add:
- in `stack_orchestrator/data/repository-list.txt` add:
```bash
github.com/my-org/my-new-stack
```
whereby that repository contains your source code and a `Dockerfile`, and matches the `repos:` field in the `stack.yml`.
- in `app/data/container-image-list.txt` add:
- in `stack_orchestrator/data/container-image-list.txt` add:
```bash
cerc/my-new-stack
```
- in `app/data/pod-list.txt` add:
- in `stack_orchestrator/data/pod-list.txt` add:
```bash
my-new-stack

View File

@ -2,10 +2,10 @@
The following tutorial explains the steps to run a laconicd fixturenet with CLI and web console that displays records in the registry. It is designed as an introduction to Stack Orchestrator and to showcase one component of the Laconic Stack. Prior to Stack Orchestrator, the following 4 repositories had to be cloned and set up manually:
- https://github.com/cerc-io/laconicd
- https://github.com/cerc-io/laconic-sdk
- https://github.com/cerc-io/laconic-registry-cli
- https://github.com/cerc-io/laconic-console
- https://git.vdb.to/cerc-io/laconicd
- https://git.vdb.to/cerc-io/laconic-sdk
- https://git.vdb.to/cerc-io/laconic-registry-cli
- https://git.vdb.to/cerc-io/laconic-console
Now, with Stack Orchestrator, it is a few quick commands. Additionally, the `docker` and `docker compose` integration on the back-end allows the stack to easily persist, facilitating workflows.
@ -52,7 +52,7 @@ laconic-so version
1. Get the repositories
```
laconic-so --stack fixturenet-laconic-loaded setup-repositories --include github.com/cerc-io/laconicd,github.com/cerc-io/laconic-sdk,github.com/cerc-io/laconic-registry-cli,github.com/cerc-io/laconic-console
laconic-so --stack fixturenet-laconic-loaded setup-repositories --include git.vdb.to/cerc-io/laconicd,git.vdb.to/cerc-io/laconic-sdk,git.vdb.to/cerc-io/laconic-registry-cli,git.vdb.to/cerc-io/laconic-console
```
2. Set this environment variable to the Laconic self-hosted Gitea instance:
@ -212,7 +212,7 @@ record:
3. Try out additional CLI commands
- these are documented [here](https://github.com/cerc-io/laconic-registry-cli#readme) and updates are forthcoming
- these are documented [here](https://git.vdb.to/cerc-io/laconic-registry-cli#readme) and updates are forthcoming
- e.g,:
```

View File

@ -1,4 +1,5 @@
python-decouple>=3.8
python-dotenv==1.0.0
GitPython>=3.1.32
tqdm>=4.65.0
python-on-whales>=0.64.0
@ -8,3 +9,4 @@ ruamel.yaml>=0.17.32
pydantic==1.10.9
tomli==2.0.1
validators==0.22.0
kubernetes>=28.1.0

View File

@ -1,6 +1,6 @@
build_tag_file_name=./app/data/build_tag.txt
build_tag_file_name=./stack_orchestrator/data/build_tag.txt
echo "# This file should be re-generated running: scripts/create_build_tag_file.sh script" > $build_tag_file_name
product_version_string=$( tail -1 ./app/data/version.txt )
product_version_string=$( tail -1 ./stack_orchestrator/data/version.txt )
commit_string=$( git rev-parse --short HEAD )
timestamp_string=$(date +'%Y%m%d%H%M')
build_tag_string=${product_version_string}-${commit_string}-${timestamp_string}
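For reference, the tag composed here follows the `<version>-<short commit>-<timestamp>` pattern shown earlier in the README (`1.1.0-7a607c2-202304260513`), and the publish workflow earlier in this diff captures it from the script's stdout. A rough usage sketch, with an illustrative output value:

```bash
# Regenerate the build tag file and capture the tag, as the publish workflow does
build_tag=$(./scripts/create_build_tag_file.sh)
echo "${build_tag}"   # e.g. 1.1.0-7a607c2-202304260513
```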

View File

@ -14,7 +14,7 @@ setup(
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/cerc-io/stack-orchestrator',
py_modules=['cli', 'app'],
py_modules=['stack_orchestrator'],
packages=find_packages(),
install_requires=[requirements],
python_requires='>=3.7',
@ -25,6 +25,6 @@ setup(
"Operating System :: OS Independent",
],
entry_points={
'console_scripts': ['laconic-so=cli:cli'],
'console_scripts': ['laconic-so=stack_orchestrator.main:cli'],
}
)

View File

@ -15,7 +15,7 @@
import os
from abc import ABC, abstractmethod
from app.deploy import get_stack_status
from stack_orchestrator.deploy.deploy import get_stack_status
from decouple import config

View File

@ -27,12 +27,79 @@ import subprocess
import click
import importlib.resources
from pathlib import Path
from app.util import include_exclude_check, get_parsed_stack_config
from app.base import get_npm_registry_url
from stack_orchestrator.util import include_exclude_check, get_parsed_stack_config
from stack_orchestrator.base import get_npm_registry_url
# TODO: find a place for this
# epilog="Config provided either in .env or settings.ini or env vars: CERC_REPO_BASE_DIR (defaults to ~/cerc)"
def make_container_build_env(dev_root_path: str,
container_build_dir: str,
debug: bool,
force_rebuild: bool,
extra_build_args: str):
container_build_env = {
"CERC_NPM_REGISTRY_URL": get_npm_registry_url(),
"CERC_GO_AUTH_TOKEN": config("CERC_GO_AUTH_TOKEN", default=""),
"CERC_NPM_AUTH_TOKEN": config("CERC_NPM_AUTH_TOKEN", default=""),
"CERC_REPO_BASE_DIR": dev_root_path,
"CERC_CONTAINER_BASE_DIR": container_build_dir,
"CERC_HOST_UID": f"{os.getuid()}",
"CERC_HOST_GID": f"{os.getgid()}",
"DOCKER_BUILDKIT": config("DOCKER_BUILDKIT", default="0")
}
container_build_env.update({"CERC_SCRIPT_DEBUG": "true"} if debug else {})
container_build_env.update({"CERC_FORCE_REBUILD": "true"} if force_rebuild else {})
container_build_env.update({"CERC_CONTAINER_EXTRA_BUILD_ARGS": extra_build_args} if extra_build_args else {})
docker_host_env = os.getenv("DOCKER_HOST")
if docker_host_env:
container_build_env.update({"DOCKER_HOST": docker_host_env})
return container_build_env
def process_container(container,
container_build_dir: str,
container_build_env: dict,
dev_root_path: str,
quiet: bool,
verbose: bool,
dry_run: bool,
continue_on_error: bool,
):
if not quiet:
print(f"Building: {container}")
build_dir = os.path.join(container_build_dir, container.replace("/", "-"))
build_script_filename = os.path.join(build_dir, "build.sh")
if verbose:
print(f"Build script filename: {build_script_filename}")
if os.path.exists(build_script_filename):
build_command = build_script_filename
else:
if verbose:
print(f"No script file found: {build_script_filename}, using default build script")
repo_dir = container.split('/')[1]
# TODO: make this less of a hack -- should be specified in some metadata somewhere
# Check if we have a repo for this container. If not, set the context dir to the container-build subdir
repo_full_path = os.path.join(dev_root_path, repo_dir)
repo_dir_or_build_dir = repo_full_path if os.path.exists(repo_full_path) else build_dir
build_command = os.path.join(container_build_dir,
"default-build.sh") + f" {container}:local {repo_dir_or_build_dir}"
if not dry_run:
if verbose:
print(f"Executing: {build_command} with environment: {container_build_env}")
build_result = subprocess.run(build_command, shell=True, env=container_build_env)
if verbose:
print(f"Return code is: {build_result.returncode}")
if build_result.returncode != 0:
print(f"Error running build for {container}")
if not continue_on_error:
print("FATAL Error: container build failed and --continue-on-error not set, exiting")
sys.exit(1)
else:
print("****** Container Build Error, continuing because --continue-on-error is set")
else:
print("Skipped")
@click.command()
@click.option('--include', help="only build these containers")
@ -52,7 +119,7 @@ def command(ctx, include, exclude, force_rebuild, extra_build_args):
continue_on_error = ctx.obj.continue_on_error
# See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure
container_build_dir = Path(__file__).absolute().parent.joinpath("data", "container-build")
container_build_dir = Path(__file__).absolute().parent.parent.joinpath("data", "container-build")
if local_stack:
dev_root_path = os.getcwd()[0:os.getcwd().rindex("stack-orchestrator")]
@ -67,7 +134,7 @@ def command(ctx, include, exclude, force_rebuild, extra_build_args):
print('Dev root directory doesn\'t exist, creating')
# See: https://stackoverflow.com/a/20885799/1701505
from app import data
from stack_orchestrator import data
with importlib.resources.open_text(data, "container-image-list.txt") as container_list_file:
all_containers = container_list_file.read().splitlines()
@ -83,61 +150,16 @@ def command(ctx, include, exclude, force_rebuild, extra_build_args):
if stack:
print(f"Stack: {stack}")
# TODO: make this configurable
container_build_env = {
"CERC_NPM_REGISTRY_URL": get_npm_registry_url(),
"CERC_GO_AUTH_TOKEN": config("CERC_GO_AUTH_TOKEN", default=""),
"CERC_NPM_AUTH_TOKEN": config("CERC_NPM_AUTH_TOKEN", default=""),
"CERC_REPO_BASE_DIR": dev_root_path,
"CERC_CONTAINER_BASE_DIR": container_build_dir,
"CERC_HOST_UID": f"{os.getuid()}",
"CERC_HOST_GID": f"{os.getgid()}",
"DOCKER_BUILDKIT": config("DOCKER_BUILDKIT", default="0")
}
container_build_env.update({"CERC_SCRIPT_DEBUG": "true"} if debug else {})
container_build_env.update({"CERC_FORCE_REBUILD": "true"} if force_rebuild else {})
container_build_env.update({"CERC_CONTAINER_EXTRA_BUILD_ARGS": extra_build_args} if extra_build_args else {})
docker_host_env = os.getenv("DOCKER_HOST")
if docker_host_env:
container_build_env.update({"DOCKER_HOST": docker_host_env})
def process_container(container):
if not quiet:
print(f"Building: {container}")
build_dir = os.path.join(container_build_dir, container.replace("/", "-"))
build_script_filename = os.path.join(build_dir, "build.sh")
if verbose:
print(f"Build script filename: {build_script_filename}")
if os.path.exists(build_script_filename):
build_command = build_script_filename
else:
if verbose:
print(f"No script file found: {build_script_filename}, using default build script")
repo_dir = container.split('/')[1]
# TODO: make this less of a hack -- should be specified in some metadata somewhere
# Check if we have a repo for this container. If not, set the context dir to the container-build subdir
repo_full_path = os.path.join(dev_root_path, repo_dir)
repo_dir_or_build_dir = repo_full_path if os.path.exists(repo_full_path) else build_dir
build_command = os.path.join(container_build_dir, "default-build.sh") + f" {container}:local {repo_dir_or_build_dir}"
if not dry_run:
if verbose:
print(f"Executing: {build_command} with environment: {container_build_env}")
build_result = subprocess.run(build_command, shell=True, env=container_build_env)
if verbose:
print(f"Return code is: {build_result.returncode}")
if build_result.returncode != 0:
print(f"Error running build for {container}")
if not continue_on_error:
print("FATAL Error: container build failed and --continue-on-error not set, exiting")
sys.exit(1)
else:
print("****** Container Build Error, continuing because --continue-on-error is set")
else:
print("Skipped")
container_build_env = make_container_build_env(dev_root_path,
container_build_dir,
debug,
force_rebuild,
extra_build_args)
for container in containers_in_scope:
if include_exclude_check(container, include, exclude):
process_container(container)
process_container(container, container_build_dir, container_build_env,
dev_root_path, quiet, verbose, dry_run, continue_on_error)
else:
if verbose:
print(f"Excluding: {container}")

View File

@ -25,8 +25,8 @@ from decouple import config
import click
import importlib.resources
from python_on_whales import docker, DockerException
from app.base import get_stack
from app.util import include_exclude_check, get_parsed_stack_config
from stack_orchestrator.base import get_stack
from stack_orchestrator.util import include_exclude_check, get_parsed_stack_config
builder_js_image_name = "cerc/builder-js:local"
@ -83,7 +83,7 @@ def command(ctx, include, exclude, force_rebuild, extra_build_args):
os.makedirs(build_root_path)
# See: https://stackoverflow.com/a/20885799/1701505
from app import data
from stack_orchestrator import data
with importlib.resources.open_text(data, "npm-package-list.txt") as package_list_file:
all_packages = package_list_file.read().splitlines()

View File

@ -0,0 +1,76 @@
# Copyright © 2022, 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http:#www.gnu.org/licenses/>.
# Builds webapp containers
# env vars:
# CERC_REPO_BASE_DIR defaults to ~/cerc
# TODO: display the available list of containers; allow re-build of either all or specific containers
import os
from decouple import config
import click
from pathlib import Path
from stack_orchestrator.build import build_containers
@click.command()
@click.option('--base-container', default="cerc/nextjs-base")
@click.option('--source-repo', help="directory containing the webapp to build", required=True)
@click.option("--force-rebuild", is_flag=True, default=False, help="Override dependency checking -- always rebuild")
@click.option("--extra-build-args", help="Supply extra arguments to build")
@click.pass_context
def command(ctx, base_container, source_repo, force_rebuild, extra_build_args):
'''build the specified webapp container'''
quiet = ctx.obj.quiet
verbose = ctx.obj.verbose
dry_run = ctx.obj.dry_run
debug = ctx.obj.debug
local_stack = ctx.obj.local_stack
stack = ctx.obj.stack
continue_on_error = ctx.obj.continue_on_error
# See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure
container_build_dir = Path(__file__).absolute().parent.parent.joinpath("data", "container-build")
if local_stack:
dev_root_path = os.getcwd()[0:os.getcwd().rindex("stack-orchestrator")]
print(f'Local stack dev_root_path (CERC_REPO_BASE_DIR) overridden to: {dev_root_path}')
else:
dev_root_path = os.path.expanduser(config("CERC_REPO_BASE_DIR", default="~/cerc"))
if not quiet:
print(f'Dev Root is: {dev_root_path}')
# First build the base container.
container_build_env = build_containers.make_container_build_env(dev_root_path, container_build_dir, debug,
force_rebuild, extra_build_args)
build_containers.process_container(base_container, container_build_dir, container_build_env, dev_root_path, quiet,
verbose, dry_run, continue_on_error)
# Now build the target webapp. We use the same build script, but with a different Dockerfile and work dir.
container_build_env["CERC_CONTAINER_BUILD_WORK_DIR"] = os.path.abspath(source_repo)
container_build_env["CERC_CONTAINER_BUILD_DOCKERFILE"] = os.path.join(container_build_dir,
base_container.replace("/", "-"),
"Dockerfile.webapp")
webapp_name = os.path.abspath(source_repo).split(os.path.sep)[-1]
container_build_env["CERC_CONTAINER_BUILD_TAG"] = f"cerc/{webapp_name}:local"
build_containers.process_container(base_container, container_build_dir, container_build_env, dev_root_path, quiet,
verbose, dry_run, continue_on_error)

View File

@ -11,10 +11,13 @@ services:
condition: service_completed_successfully
environment:
CERC_NITRO_CHAIN_URL: ${CERC_NITRO_CHAIN_URL:-ws://fixturenet-eth-geth-1:8546}
CERC_NITRO_PK: ${CERC_NITRO_PK:-2d999770f7b5d49b694080f987b82bbc9fc9ac2b4dcc10b0f8aba7d700f69c6d}
CERC_NITRO_CHAIN_PK: ${CERC_NITRO_CHAIN_PK:-570b909da9669b2f35a0b1ac70b8358516d55ae1b5b3710e95e9a94395090597}
CERC_NITRO_PK: ${CERC_NITRO_PK:-f36f6dd450892224ee113899195ef922a4795d41c32cafb386d9aab6e0b7b0c6}
CERC_NITRO_CHAIN_PK: ${CERC_NITRO_CHAIN_PK:-888814df89c4358d7ddb3fa4b0213e7331239a80e1f013eaa7b2deca2a41a218}
CERC_NITRO_USE_DURABLE_STORE: ${CERC_NITRO_USE_DURABLE_STORE:-true}
CERC_NITRO_DURABLE_STORE_FOLDER: ${CERC_NITRO_DURABLE_STORE_FOLDER:-/app/data/nitro-store}
CERC_NITRO_MSG_PORT: ${CERC_NITRO_MSG_PORT:-3006}
CERC_NITRO_WS_MSG_PORT: ${CERC_NITRO_WS_MSG_PORT:-5006}
CERC_NITRO_RPC_PORT: ${CERC_NITRO_RPC_PORT:-4006}
CERC_NA_ADDRESS: ${CERC_NA_ADDRESS}
CERC_VPA_ADDRESS: ${CERC_VPA_ADDRESS}
CERC_CA_ADDRESS: ${CERC_CA_ADDRESS}
@ -24,25 +27,15 @@ services:
- nitro_deployment:/app/deployment
- ../config/go-nitro/run-nitro-node.sh:/app/run-nitro-node.sh
healthcheck:
test: ["CMD", "nc", "-vz", "localhost", "4005"]
interval: 10s
test: ["CMD", "nc", "-vz", "localhost", "4006"]
interval: 30s
timeout: 5s
retries: 10
start_period: 10s
ports:
- "3005"
- "4005:4005"
- "5005:5005"
nitro-rpc-client:
image: cerc/nitro-rpc-client:local
hostname: nitro-rpc-client
restart: on-failure
depends_on:
# Wait for the go-nitro node to start
go-nitro:
condition: service_healthy
command: ["bash", "-c", "tail -f /dev/null"]
- "3006:3006"
- "4006:4006"
- "5006:5006"
volumes:
go_nitro_data:

View File

@ -0,0 +1,111 @@
version: "3.2"
services:
ipld-eth-server-1:
restart: unless-stopped
depends_on:
ipld-eth-db:
condition: service_healthy
image: cerc/ipld-eth-server:local
environment:
SERVER_HTTP_PATH: 0.0.0.0:8081
SERVER_GRAPHQL: "true"
SERVER_GRAPHQLPATH: 0.0.0.0:8082
VDB_COMMAND: "serve"
ETH_CHAIN_CONFIG: "/tmp/chain.json"
DATABASE_NAME: cerc_testing
DATABASE_HOSTNAME: ipld-eth-db
DATABASE_PORT: 5432
DATABASE_USER: "vdbm"
DATABASE_PASSWORD: "password"
ETH_CHAIN_ID: 99
ETH_FORWARD_ETH_CALLS: "false"
ETH_FORWARD_GET_STORAGE_AT: "false"
ETH_PROXY_ON_ERROR: "false"
METRICS: "true"
PROM_HTTP: "true"
PROM_HTTP_ADDR: "0.0.0.0"
PROM_HTTP_PORT: "8090"
LOG_LEVEL: "debug"
CERC_REMOTE_DEBUG: ${CERC_REMOTE_DEBUG:-true}
NITRO_RUN_NODE_IN_PROCESS: ${CERC_NITRO_RUN_NODE_IN_PROCESS:-true}
NITRO_PK: ${CERC_NITRO_PK:-2d999770f7b5d49b694080f987b82bbc9fc9ac2b4dcc10b0f8aba7d700f69c6d}
NITRO_CHAIN_PK: ${CERC_NITRO_CHAIN_PK:-570b909da9669b2f35a0b1ac70b8358516d55ae1b5b3710e95e9a94395090597}
NITRO_CHAIN_URL: ${CERC_NITRO_CHAIN_URL:-ws://fixturenet-eth-geth-1:8546}
NITRO_USE_DURABLE_STORE: ${CERC_NITRO_USE_DURABLE_STORE:-true}
NITRO_DURABLE_STORE_FOLDER: ${CERC_NITRO_DURABLE_STORE_FOLDER:-/app/nitro-data/nitro-store}
CERC_NA_ADDRESS: ${CERC_NA_ADDRESS}
CERC_VPA_ADDRESS: ${CERC_VPA_ADDRESS}
CERC_CA_ADDRESS: ${CERC_CA_ADDRESS}
entrypoint: ["bash", "-c", "/app/entrypoint.sh"]
volumes:
- type: bind
source: ../config/ipld-eth-server/chain.json
target: /tmp/chain.json
- eth_server_nitro_data:/app/nitro-data
- nitro_deployment:/app/deployment
- ../config/ipld-eth-server/entrypoint.sh:/app/entrypoint.sh
ports:
- "8081"
- "8082"
- "8090"
- "40000"
- "3005:3005"
- "4005:4005"
- "5005:5005"
healthcheck:
test: ["CMD", "nc", "-v", "localhost", "8081"]
interval: 20s
timeout: 5s
retries: 15
start_period: 5s
ipld-eth-server-2:
restart: unless-stopped
depends_on:
ipld-eth-db:
condition: service_healthy
image: cerc/ipld-eth-server:local
environment:
SERVER_HTTP_PATH: 0.0.0.0:8081
SERVER_GRAPHQL: "true"
SERVER_GRAPHQLPATH: 0.0.0.0:8082
VDB_COMMAND: "serve"
ETH_CHAIN_CONFIG: "/tmp/chain.json"
DATABASE_NAME: cerc_testing
DATABASE_HOSTNAME: ipld-eth-db
DATABASE_PORT: 5432
DATABASE_USER: "vdbm"
DATABASE_PASSWORD: "password"
ETH_CHAIN_ID: 99
ETH_FORWARD_ETH_CALLS: "false"
ETH_FORWARD_GET_STORAGE_AT: "false"
ETH_PROXY_ON_ERROR: "false"
METRICS: "true"
PROM_HTTP: "true"
PROM_HTTP_ADDR: "0.0.0.0"
PROM_HTTP_PORT: "8090"
LOG_LEVEL: "debug"
CERC_REMOTE_DEBUG: ${CERC_REMOTE_DEBUG:-true}
NITRO_RUN_NODE_IN_PROCESS: ${CERC_NITRO_RUN_NODE_IN_PROCESS:-false}
NITRO_ENDPOINT: ${CERC_NITRO_ENDPOINT:-go-nitro:4006/api/v1}
entrypoint: ["bash", "-c", "/app/entrypoint.sh"]
volumes:
- type: bind
source: ../config/ipld-eth-server/chain.json
target: /tmp/chain.json
- ../config/ipld-eth-server/entrypoint.sh:/app/entrypoint.sh
ports:
- "8081"
- "8082"
- "8090"
- "40000"
healthcheck:
test: ["CMD", "nc", "-v", "localhost", "8081"]
interval: 20s
timeout: 5s
retries: 15
start_period: 5s
volumes:
eth_server_nitro_data:
nitro_deployment:

View File

@ -0,0 +1,8 @@
version: "3.2"
services:
laconic-dot-com:
image: cerc/laconic-dot-com:local
restart: always
ports:
- "3000"

View File

@ -0,0 +1,29 @@
version: "3.2"
services:
migrations:
restart: on-failure
depends_on:
ipld-eth-db:
condition: service_healthy
image: cerc/ipld-eth-db:local
env_file:
- ../config/mainnet-eth-ipld-eth-db/db.env
ipld-eth-db:
image: timescale/timescaledb:2.8.1-pg14
restart: always
env_file:
- ../config/mainnet-eth-ipld-eth-db/db.env
volumes:
- mainnet_eth_ipld_eth_db:/var/lib/postgresql/data
healthcheck:
test: ["CMD", "nc", "-v", "localhost", "5432"]
interval: 30s
timeout: 10s
retries: 10
start_period: 3s
ports:
- "5432"
volumes:
mainnet_eth_ipld_eth_db:

View File

@ -0,0 +1,24 @@
version: "3.7"
services:
ipld-eth-server:
restart: always
depends_on:
ipld-eth-db:
condition: service_healthy
image: cerc/ipld-eth-server:local
env_file:
- ../config/mainnet-eth-ipld-eth-db/db.env
- ../config/mainnet-eth-ipld-eth-server/srv.env
volumes:
- ../config/mainnet-eth-ipld-eth-server/config.toml:/app/config.toml:ro
ports:
- "8081"
- "8082"
- "8090"
- "40001"
healthcheck:
test: ["CMD", "nc", "-v", "localhost", "8081"]
interval: 20s
timeout: 5s
retries: 15
start_period: 5s

View File

@ -6,7 +6,7 @@ services:
env_file:
- ../config/mainnet-eth-keycloak/keycloak.env
healthcheck:
test: ["CMD", "nc", "-v", "localhost", "5432"]
test: ["CMD", "nc", "-v", "localhost", "35432"]
interval: 30s
timeout: 10s
retries: 10
@ -14,7 +14,7 @@ services:
volumes:
- mainnet_eth_keycloak_db:/var/lib/postgresql/data
ports:
- 5432
- 35432
keycloak:
image: cerc/keycloak:local

View File

@ -0,0 +1,72 @@
services:
mainnet-eth-geth-1:
restart: always
hostname: mainnet-eth-geth-1
cap_add:
- SYS_PTRACE
image: cerc/plugeth-with-plugins:local
entrypoint: /bin/sh
command: -c "/opt/run-geth.sh"
env_file:
- ../config/mainnet-eth-ipld-eth-db/db.env
- ../config/mainnet-eth-plugeth/geth.env
volumes:
- mainnet_eth_plugeth_geth_1_data:/data
- mainnet_eth_plugeth_config_data:/etc/mainnet-eth
- ../config/mainnet-eth-plugeth/scripts/run-geth.sh:/opt/run-geth.sh
healthcheck:
test: ["CMD", "nc", "-v", "localhost", "8545"]
interval: 30s
timeout: 10s
retries: 10
start_period: 3s
ports:
# http api
- "8545"
# ws api
- "8546"
# ws el
- "8551"
# p2p
- "30303"
- "30303/udp"
# debugging
- "40000"
# metrics
- "6060"
mainnet-eth-lighthouse-1:
restart: always
hostname: mainnet-eth-lighthouse-1
healthcheck:
test: ["CMD", "wget", "--tries=1", "--connect-timeout=1", "--quiet", "-O", "-", "http://localhost:5052/eth/v2/beacon/blocks/head"]
interval: 30s
timeout: 10s
retries: 10
start_period: 30s
environment:
LIGHTHOUSE_EXECUTION_ENDPOINT: "http://mainnet-eth-geth-1:8551"
env_file:
- ../config/mainnet-eth-plugeth/lighthouse.env
image: cerc/lighthouse:local
entrypoint: /bin/sh
command: -c "/opt/run-lighthouse.sh"
volumes:
- mainnet_eth_plugeth_lighthouse_1_data:/data
- mainnet_eth_plugeth_config_data:/etc/mainnet-eth
- ../config/mainnet-eth-plugeth/scripts/run-lighthouse.sh:/opt/run-lighthouse.sh
ports:
# api
- "5052"
# metrics
- "5054"
# p2p
- "9000"
- "9000/udp"
volumes:
mainnet_eth_plugeth_config_data:
mainnet_eth_plugeth_geth_1_data:
mainnet_eth_plugeth_lighthouse_1_data:

View File

@ -0,0 +1,7 @@
version: '3.7'
services:
nitro-rpc-client:
image: cerc/nitro-rpc-client:local
hostname: nitro-rpc-client
restart: on-failure
command: ["bash", "-c", "tail -f /dev/null"]

View File

@ -0,0 +1,80 @@
version: '3.7'
services:
ponder-er20-contracts:
image: cerc/watcher-erc20:local
restart: on-failure
environment:
CERC_SCRIPT_DEBUG: ${CERC_SCRIPT_DEBUG}
CERC_ETH_RPC_ENDPOINT: ${CERC_ETH_RPC_ENDPOINT:-http://fixturenet-eth-geth-1:8545}
CERC_PRIVATE_KEY_DEPLOYER: ${CERC_PRIVATE_KEY_DEPLOYER:-0x888814df89c4358d7ddb3fa4b0213e7331239a80e1f013eaa7b2deca2a41a218}
volumes:
- ../config/ponder/deploy-erc20-contract.sh:/app/deploy-erc20-contract.sh
- erc20_deployment:/app/deployment
command: ["bash", "-c", "/app/deploy-erc20-contract.sh"]
extra_hosts:
- "host.docker.internal:host-gateway"
ponder-app-indexer-1:
hostname: ponder-app-indexer-1
restart: unless-stopped
image: cerc/ponder:local
working_dir: /app/examples/token-erc20
environment:
CERC_PONDER_CHAIN_ID: ${PONDER_CHAIN_ID:-99}
CERC_PONDER_RPC_URL_1: ${PONDER_RPC_URL_1:-http://ipld-eth-server-2:8081}
CERC_PONDER_NITRO_PK: ${CERC_PONDER_INDEXER_NITRO_PK_1:-58368d20ff12f17669c06158c21d885897aa56f9be430edc789614bf9851d53f}
CERC_PONDER_NITRO_CHAIN_PK: ${CERC_PONDER_INDEXER_NITRO_CHAIN_PK_1:-fb1e9af328c283ca3e2486e7c24d13582b7912057d8b9542ff41503c85bc05c0}
CERC_PONDER_NITRO_CHAIN_URL: ${CERC_PONDER_NITRO_CHAIN_URL:-http://fixturenet-eth-geth-1:8546}
CERC_RELAY_MULTIADDR: ${CERC_RELAY_MULTIADDR}
CERC_UPSTREAM_NITRO_ADDRESS: ${CERC_UPSTREAM_NITRO_ADDRESS:-0x660a4bEF3fbC863Fcd8D3CDB39242aE513d7D92e}
CERC_UPSTREAM_NITRO_MULTIADDR: ${CERC_UPSTREAM_NITRO_MULTIADDR:-/dns4/go-nitro/tcp/5006/ws/p2p/16Uiu2HAmNUiX7bpCpbo5JdqEebp85ptGU2Vk2AT9E3BykvbwQ3F9}
CERC_UPSTREAM_NITRO_PAY_AMOUNT: ${CERC_UPSTREAM_NITRO_PAY_AMOUNT:-100}
command: ["bash", "./ponder-start.sh"]
volumes:
- ../config/ponder/ponder-start.sh:/app/examples/token-erc20/ponder-start.sh
- ../config/ponder/ponder.indexer-1.config.ts:/app/examples/token-erc20/ponder.config.ts
- ../config/ponder/base-rates-config.json:/app/examples/token-erc20/base-rates-config.json
- peers_ids:/peers
- nitro_deployment:/nitro
- erc20_deployment:/erc20
- ponder_indexer_1_nitro_data:/app/examples/token-erc20/.ponder/nitro-db
ports:
- "127.0.0.1:42070:42070"
extra_hosts:
- "host.docker.internal:host-gateway"
ponder-app-indexer-2:
hostname: ponder-app-indexer-2
restart: unless-stopped
image: cerc/ponder:local
working_dir: /app/examples/token-erc20
environment:
CERC_PONDER_CHAIN_ID: ${PONDER_CHAIN_ID:-99}
CERC_INDEXER_GQL_ENDPOINT: ${CERC_INDEXER_GQL_ENDPOINT:-http://ponder-app-indexer-1:42070/graphql}
CERC_PONDER_NITRO_PK: ${CERC_PONDER_INDEXER_NITRO_PK_2:-0aca28ba64679f63d71e671ab4dbb32aaa212d4789988e6ca47da47601c18fe2}
CERC_PONDER_NITRO_CHAIN_PK: ${CERC_PONDER_INDEXER_NITRO_CHAIN_PK_2:-6177345b77c4069ac4d553f8b43cf68a799ca4bb63eac93d6cf796d63694ebf0}
CERC_PONDER_NITRO_CHAIN_URL: ${CERC_PONDER_NITRO_CHAIN_URL:-http://fixturenet-eth-geth-1:8546}
CERC_RELAY_MULTIADDR: ${CERC_RELAY_MULTIADDR}
CERC_INDEXER_NITRO_ADDRESS: ${CERC_INDEXER_NITRO_ADDRESS:-0x67D5b55604d1aF90074FcB69b8C51838FFF84f8d}
CERC_UPSTREAM_NITRO_PAY_AMOUNT: ${CERC_UPSTREAM_NITRO_PAY_AMOUNT:-100}
command: ["bash", "./ponder-start.sh"]
volumes:
- ../config/ponder/ponder-start.sh:/app/examples/token-erc20/ponder-start.sh
- ../config/ponder/ponder.indexer-2.config.ts:/app/examples/token-erc20/ponder.config.ts
- ../config/ponder/base-rates-config.json:/app/examples/token-erc20/base-rates-config.json
- peers_ids:/peers
- nitro_deployment:/nitro
- erc20_deployment:/erc20
- ponder_indexer_2_nitro_data:/app/examples/token-erc20/.ponder/nitro-db
ports:
- "127.0.0.1:42071:42070"
extra_hosts:
- "host.docker.internal:host-gateway"
volumes:
peers_ids:
nitro_deployment:
erc20_deployment:
ponder_indexer_1_nitro_data:
ponder_indexer_2_nitro_data:

View File

@ -0,0 +1,39 @@
version: '3.7'
services:
ponder-app-watcher:
hostname: ponder-app-watcher
depends_on:
- ponder-app-indexer-1
restart: unless-stopped
image: cerc/ponder:local
working_dir: /app/examples/token-erc20
environment:
CERC_PONDER_CHAIN_ID: ${PONDER_CHAIN_ID:-99}
CERC_PONDER_NITRO_PK: ${CERC_PONDER_WATCHER_NITRO_PK:-febb3b74b0b52d0976f6571d555f4ac8b91c308dfa25c7b58d1e6a7c3f50c781}
CERC_PONDER_NITRO_CHAIN_PK: ${CERC_PONDER_WATCHER_NITRO_CHAIN_PK:-be4aa664815ea3bc3d63118649a733f6c96b243744310806ecb6d96359ab62cf}
CERC_PONDER_NITRO_CHAIN_URL: ${CERC_PONDER_NITRO_CHAIN_URL:-http://fixturenet-eth-geth-1:8546}
CERC_RELAY_MULTIADDR: ${CERC_RELAY_MULTIADDR}
CERC_INDEXER_GQL_ENDPOINT: ${CERC_INDEXER_GQL_ENDPOINT:-http://ponder-app-indexer-2:42070/graphql}
CERC_INDEXER_NITRO_ADDRESS: ${CERC_INDEXER_NITRO_ADDRESS:-0xB2B22ec3889d11f2ddb1A1Db11e80D20EF367c01}
CERC_INDEXER_NITRO_PAY_AMOUNT: ${CERC_INDEXER_NITRO_PAY_AMOUNT:-50}
command: ["bash", "./ponder-start.sh"]
volumes:
- ../config/ponder/ponder-start.sh:/app/examples/token-erc20/ponder-start.sh
- ../config/ponder/ponder.watcher.config.ts:/app/examples/token-erc20/ponder.config.ts
- ../config/ponder/base-rates-config.json:/app/examples/token-erc20/base-rates-config.json
- peers_ids:/peers
- nitro_deployment:/nitro
- erc20_deployment:/erc20
- ponder_watcher_nitro_data:/app/examples/token-erc20/.ponder/nitro-db
ports:
- "127.0.0.1:42069:42069"
extra_hosts:
- "host.docker.internal:host-gateway"
volumes:
peers_ids:
nitro_deployment:
erc20_deployment:
ponder_watcher_nitro_data:

View File

@ -34,7 +34,7 @@ services:
- ETH_RPC_URL=http://go-ethereum:8545
command: ["sh", "-c", "yarn server"]
volumes:
- ../config/watcher-erc20/erc20-watcher.toml:/app/packages/erc20-watcher/environments/local.toml
- ../config/watcher-erc20/erc20-watcher.toml:/app/environments/local.toml
ports:
- "0.0.0.0:3002:3001"
- "0.0.0.0:9002:9001"

View File

@ -89,8 +89,8 @@ services:
CERC_PEER_ID: ${CERC_PEER_ID}
CERC_ENABLE_UPSTREAM_PAYMENTS: ${CERC_ENABLE_UPSTREAM_PAYMENTS}
CERC_UPSTREAM_NITRO_ADDRESS: ${CERC_UPSTREAM_NITRO_ADDRESS:-0xAAA6628Ec44A8a742987EF3A114dDFE2D4F7aDCE}
CERC_UPSTREAM_NITRO_MULTIADDR: ${CERC_UPSTREAM_NITRO_MULTIADDR:-/dns4/go-nitro/tcp/5005/ws/p2p/16Uiu2HAmSjXJqsyBJgcBUU2HQmykxGseafSatbpq5471XmuaUqyv}
CERC_UPSTREAM_NITRO_PAY_AMOUNT: ${CERC_UPSTREAM_NITRO_PAY_AMOUNT:-5000}
CERC_UPSTREAM_NITRO_MULTIADDR: ${CERC_UPSTREAM_NITRO_MULTIADDR:-/dns4/ipld-eth-server-1/tcp/5005/ws/p2p/16Uiu2HAmSjXJqsyBJgcBUU2HQmykxGseafSatbpq5471XmuaUqyv}
CERC_UPSTREAM_NITRO_PAY_AMOUNT: ${CERC_UPSTREAM_NITRO_PAY_AMOUNT:-100}
command: ["bash", "./start-server.sh"]
volumes:
- ../config/watcher-mobymask-v3/watcher-config-template.toml:/app/environments/watcher-config-template.toml

Some files were not shown because too many files have changed in this diff.