Compare commits


6 Commits

Author      SHA1        Message                                                    Date
srwadleigh  6662252649  update pod name                                            2023-12-06 21:52:23 +00:00
srwadleigh  0b88d47832  clean up readme formatting                                 2023-12-06 21:51:36 +00:00
srwadleigh  62a78d48fc  fix typo in readme                                         2023-12-06 21:49:49 +00:00
srwadleigh  80713153d8  Merge remote-tracking branch 'origin/main' into keycloak   2023-12-06 21:44:12 +00:00
srwadleigh  ca057925da  update readme                                              2023-10-25 17:59:47 +00:00
srwadleigh  a3e141769a  initial pass at a standalone keycloak stack                2023-10-25 17:47:33 +00:00
177 changed files with 855 additions and 37228 deletions

View File

@ -6,8 +6,6 @@ on:
   paths:
     - '!**'
     - '.gitea/workflows/triggers/fixturenet-eth-plugeth-test'
-  schedule: # Note: coordinate with other tests to not overload runners at the same time of day
-    - cron: '2 14 * * *'
 # Needed until we can incorporate docker startup into the executor container
 env:

View File

@ -6,8 +6,11 @@ on:
   paths:
     - '!**'
     - '.gitea/workflows/triggers/fixturenet-laconicd-test'
-  schedule:
-    - cron: '1 13 * * *'
+# Needed until we can incorporate docker startup into the executor container
+env:
+  DOCKER_HOST: unix:///var/run/dind.sock
 jobs:
   test:
@ -44,5 +47,9 @@ jobs:
       run: ./scripts/create_build_tag_file.sh
     - name: "Build local shiv package"
       run: ./scripts/build_shiv_package.sh
+    - name: Start dockerd # Also needed until we can incorporate into the executor
+      run: |
+        dockerd -H $DOCKER_HOST --userland-proxy=false &
+        sleep 5
     - name: "Run fixturenet-laconicd tests"
       run: ./tests/fixturenet-laconicd/run-test.sh

View File

@ -1,21 +0,0 @@
name: Lint Checks
on:
pull_request:
branches: '*'
push:
branches: '*'
jobs:
test:
name: "Run linter"
runs-on: ubuntu-latest
steps:
- name: "Clone project repository"
uses: actions/checkout@v3
- name: "Install Python"
uses: actions/setup-python@v4
with:
python-version: '3.8'
- name: "Run flake8"
uses: py-actions/flake8@v2

View File

@ -1,54 +0,0 @@
name: Container Registry Test
on:
push:
branches: '*'
paths:
- '!**'
- '.gitea/workflows/triggers/test-container-registry'
- '.gitea/workflows/test-container-registry.yml'
- 'tests/container-registry/run-test.sh'
schedule: # Note: coordinate with other tests to not overload runners at the same time of day
- cron: '6 19 * * *'
jobs:
test:
name: "Run contaier registry hosting test on kind/k8s"
runs-on: ubuntu-22.04
steps:
- name: "Clone project repository"
uses: actions/checkout@v3
# At present the stock setup-python action fails on Linux/aarch64
# Conditional steps below work around this by using deadsnakes for that case only
- name: "Install Python for ARM on Linux"
if: ${{ runner.arch == 'arm64' && runner.os == 'Linux' }}
uses: deadsnakes/action@v3.0.1
with:
python-version: '3.8'
- name: "Install Python cases other than ARM on Linux"
if: ${{ ! (runner.arch == 'arm64' && runner.os == 'Linux') }}
uses: actions/setup-python@v4
with:
python-version: '3.8'
- name: "Print Python version"
run: python3 --version
- name: "Install shiv"
run: pip install shiv
- name: "Generate build version file"
run: ./scripts/create_build_tag_file.sh
- name: "Build local shiv package"
run: ./scripts/build_shiv_package.sh
- name: "Check cgroups version"
run: mount | grep cgroup
- name: "Install kind"
run: ./tests/scripts/install-kind.sh
- name: "Install Kubectl"
run: ./tests/scripts/install-kubectl.sh
- name: "Install ed" # Only needed until we remove the need to edit the spec file
run: apt update && apt install -y ed
- name: "Run container registry deployment test"
run: |
source /opt/bash-utils/cgroup-helper.sh
join_cgroup
./tests/container-registry/run-test.sh

View File

@ -1,52 +0,0 @@
name: Database Test
on:
push:
branches: '*'
paths:
- '!**'
- '.gitea/workflows/triggers/test-database'
- '.gitea/workflows/test-database.yml'
- 'tests/database/run-test.sh'
schedule: # Note: coordinate with other tests to not overload runners at the same time of day
- cron: '5 18 * * *'
jobs:
test:
name: "Run database hosting test on kind/k8s"
runs-on: ubuntu-22.04
steps:
- name: "Clone project repository"
uses: actions/checkout@v3
# At present the stock setup-python action fails on Linux/aarch64
# Conditional steps below work around this by using deadsnakes for that case only
- name: "Install Python for ARM on Linux"
if: ${{ runner.arch == 'arm64' && runner.os == 'Linux' }}
uses: deadsnakes/action@v3.0.1
with:
python-version: '3.8'
- name: "Install Python cases other than ARM on Linux"
if: ${{ ! (runner.arch == 'arm64' && runner.os == 'Linux') }}
uses: actions/setup-python@v4
with:
python-version: '3.8'
- name: "Print Python version"
run: python3 --version
- name: "Install shiv"
run: pip install shiv
- name: "Generate build version file"
run: ./scripts/create_build_tag_file.sh
- name: "Build local shiv package"
run: ./scripts/build_shiv_package.sh
- name: "Check cgroups version"
run: mount | grep cgroup
- name: "Install kind"
run: ./tests/scripts/install-kind.sh
- name: "Install Kubectl"
run: ./tests/scripts/install-kubectl.sh
- name: "Run database deployment test"
run: |
source /opt/bash-utils/cgroup-helper.sh
join_cgroup
./tests/database/run-test.sh

View File

@ -4,19 +4,20 @@ on:
   pull_request:
     branches: '*'
   push:
-    branches: '*'
-    paths:
-      - '!**'
-      - '.gitea/workflows/triggers/test-k8s-deploy'
-      - '.gitea/workflows/test-k8s-deploy.yml'
-      - 'tests/k8s-deploy/run-deploy-test.sh'
-  schedule: # Note: coordinate with other tests to not overload runners at the same time of day
-    - cron: '3 15 * * *'
+    branches:
+      - main
+      - ci-test
+    paths-ignore:
+      - '.gitea/workflows/triggers/*'
+# Needed until we can incorporate docker startup into the executor container
+env:
+  DOCKER_HOST: unix:///var/run/dind.sock
 jobs:
   test:
-    name: "Run deploy test suite on kind/k8s"
-    runs-on: ubuntu-22.04
+    name: "Run deploy test suite"
+    runs-on: ubuntu-latest
     steps:
     - name: "Clone project repository"
       uses: actions/checkout@v3
@ -40,15 +41,15 @@ jobs:
       run: ./scripts/create_build_tag_file.sh
     - name: "Build local shiv package"
       run: ./scripts/build_shiv_package.sh
-    - name: "Check cgroups version"
-      run: mount | grep cgroup
-    - name: "Install kind"
-      run: ./tests/scripts/install-kind.sh
-    - name: "Install Kubectl"
-      run: ./tests/scripts/install-kubectl.sh
-    - name: "Run k8s deployment test"
-      run: |
-        source /opt/bash-utils/cgroup-helper.sh
-        join_cgroup
-        ./tests/k8s-deploy/run-deploy-test.sh
+    - name: Start dockerd # Also needed until we can incorporate into the executor
+      run: |
+        dockerd -H $DOCKER_HOST --userland-proxy=false &
+        sleep 5
+    - name: "Install Go"
+      uses: actions/setup-go@v4
+      with:
+        go-version: '1.21'
+    - name: "Install Kind"
+      run: go install sigs.k8s.io/kind@v0.20.0
+    - name: "Debug Kind"
+      run: kind create cluster --retain && docker logs kind-control-plane

View File

@ -41,8 +41,6 @@ jobs:
       run: ./scripts/create_build_tag_file.sh
     - name: "Build local shiv package"
       run: ./scripts/build_shiv_package.sh
-    - name: "Install wget" # 20240109 - Only needed until the executors are updated.
-      run: apt update && apt install -y wget
     - name: Start dockerd # Also needed until we can incorporate into the executor
       run: |
         dockerd -H $DOCKER_HOST --userland-proxy=false &

View File

@ -1,3 +1,2 @@
 Change this file to trigger running the fixturenet-eth-plugeth-test CI job
 trigger
-trigger

View File

@ -1,3 +1,2 @@
 Change this file to trigger running the fixturenet-laconicd-test CI job
 Trigger
-Trigger

View File

@ -1 +0,0 @@
Change this file to trigger running the test-container-registry CI job

View File

@ -1,2 +0,0 @@
Change this file to trigger running the test-database CI job
Trigger test run

View File

@ -1,2 +0,0 @@
Change this file to trigger running the test-k8s-deploy CI job
Trigger test on PR branch

View File

@ -29,10 +29,10 @@ chmod +x ~/.docker/cli-plugins/docker-compose
 Next decide on a directory where you would like to put the stack-orchestrator program. Typically this would be
 a "user" binary directory such as `~/bin` or perhaps `/usr/local/laconic` or possibly just the current working directory.
-Now, having selected that directory, download the latest release from [this page](https://git.vdb.to/cerc-io/stack-orchestrator/tags) into it (we're using `~/bin` below for concreteness but edit to suit if you selected a different directory). Also be sure that the destination directory exists and is writable:
+Now, having selected that directory, download the latest release from [this page](https://github.com/cerc-io/stack-orchestrator/tags) into it (we're using `~/bin` below for concreteness but edit to suit if you selected a different directory). Also be sure that the destination directory exists and is writable:
 ```bash
-curl -L -o ~/bin/laconic-so https://git.vdb.to/cerc-io/stack-orchestrator/releases/download/latest/laconic-so
+curl -L -o ~/bin/laconic-so https://github.com/cerc-io/stack-orchestrator/releases/latest/download/laconic-so
 ```
 Give it execute permissions:
@ -52,7 +52,7 @@ Version: 1.1.0-7a607c2-202304260513
 Save the distribution url to `~/.laconic-so/config.yml`:
 ```bash
 mkdir ~/.laconic-so
-echo "distribution-url: https://git.vdb.to/cerc-io/stack-orchestrator/releases/download/latest/laconic-so" > ~/.laconic-so/config.yml
+echo "distribution-url: https://github.com/cerc-io/stack-orchestrator/releases/latest/download/laconic-so" > ~/.laconic-so/config.yml
 ```
 ### Update

View File

@ -0,0 +1,41 @@
# keycloak
Deploys a standalone [keycloak](https://www.keycloak.org).
## Clone required repositories
```
$ laconic-so --stack keycloak setup-repositories
```
## Build containers
```
$ laconic-so --stack keycloak build-containers
```
## Create a deployment
```
$ laconic-so --stack keycloak deploy init --map-ports-to-host any-same --output keycloak-spec.yml
$ laconic-so deploy create --spec-file keycloak-spec.yml --deployment-dir keycloak-deployment
```
## Start the stack
```
$ laconic-so deployment --dir keycloak-deployment start
```
Display stack status:
```
$ laconic-so deployment --dir keycloak-deployment ps
Running containers:
```
See stack logs:
```
$ laconic-so deployment --dir keycloak-deployment logs
```

View File

@ -0,0 +1,14 @@
# Copyright © 2023 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

View File

@ -0,0 +1,13 @@
version: "1.0"
name: keycloak
description: "Keycloak"
repos:
- git.vdb.to/cerc-io/keycloak-reg-api
- git.vdb.to/cerc-io/keycloak-reg-ui
containers:
- cerc/keycloak
- cerc/keycloak-reg-api
- cerc/keycloak-reg-ui
- cerc/webapp-base
pods:
- keycloak

View File

@ -26,7 +26,7 @@ In addition to the pre-requisites listed in the [README](/README.md), the follow
 1. Clone this repository:
    ```
-   $ git clone https://git.vdb.to/cerc-io/stack-orchestrator.git
+   $ git clone https://github.com/cerc-io/stack-orchestrator.git
    ```
 2. Enter the project directory:

View File

@ -1,10 +1,10 @@
 # Adding a new stack
-See [this PR](https://git.vdb.to/cerc-io/stack-orchestrator/pull/434) for an example of how to currently add a minimal stack to stack orchestrator. The [reth stack](https://git.vdb.to/cerc-io/stack-orchestrator/pull/435) is another good example.
+See [this PR](https://github.com/cerc-io/stack-orchestrator/pull/434) for an example of how to currently add a minimal stack to stack orchestrator. The [reth stack](https://github.com/cerc-io/stack-orchestrator/pull/435) is another good example.
 For external developers, we recommend forking this repo and adding your stack directly to your fork. This initially requires running in "developer mode" as described [here](/docs/CONTRIBUTING.md). Check out the [Namada stack](https://github.com/vknowable/stack-orchestrator/blob/main/app/data/stacks/public-namada/digitalocean_quickstart.md) from Knowable to see how that is done.
-Core to the feature completeness of stack orchestrator is to [decouple the tool functionality from payload](https://git.vdb.to/cerc-io/stack-orchestrator/issues/315) which will no longer require forking to add a stack.
+Core to the feature completeness of stack orchestrator is to [decouple the tool functionality from payload](https://github.com/cerc-io/stack-orchestrator/issues/315) which will no longer require forking to add a stack.
 ## Example

View File

@ -1,6 +1,6 @@
 # Specification
-Note: this page is out of date (but still useful) - it will no longer be useful once stacks are [decoupled from the tool functionality](https://git.vdb.to/cerc-io/stack-orchestrator/issues/315).
+Note: this page is out of date (but still useful) - it will no longer be useful once stacks are [decoupled from the tool functionality](https://github.com/cerc-io/stack-orchestrator/issues/315).
 ## Implementation

View File

@ -10,4 +10,3 @@ pydantic==1.10.9
 tomli==2.0.1
 validators==0.22.0
 kubernetes>=28.1.0
-humanfriendly>=10.0

View File

@ -41,4 +41,4 @@ runcmd:
   - apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
   - systemctl enable docker
   - systemctl start docker
-  - git clone https://git.vdb.to/cerc-io/stack-orchestrator.git /home/ubuntu/stack-orchestrator
+  - git clone https://github.com/cerc-io/stack-orchestrator.git /home/ubuntu/stack-orchestrator

View File

@ -31,5 +31,5 @@ runcmd:
   - apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
   - systemctl enable docker
   - systemctl start docker
-  - curl -L -o /usr/local/bin/laconic-so https://git.vdb.to/cerc-io/stack-orchestrator/releases/download/latest/laconic-so
+  - curl -L -o /usr/local/bin/laconic-so https://github.com/cerc-io/stack-orchestrator/releases/latest/download/laconic-so
   - chmod +x /usr/local/bin/laconic-so

View File

@ -1,19 +0,0 @@
#!/usr/bin/env bash
# Beginnings of a script to quickly spin up and test a deployment
if [[ -n "$CERC_SCRIPT_DEBUG" ]]; then
set -x
fi
if [[ -n "$1" ]]; then
stack_name=$1
else
stack_name="test"
fi
spec_file_name="${stack_name}-spec.yml"
deployment_dir_name="${stack_name}-deployment"
rm -f ${spec_file_name}
rm -rf ${deployment_dir_name}
laconic-so --stack ${stack_name} deploy --deploy-to compose init --output ${spec_file_name}
laconic-so --stack ${stack_name} deploy --deploy-to compose create --deployment-dir ${deployment_dir_name} --spec-file ${spec_file_name}
#laconic-so deployment --dir ${deployment_dir_name} start
#laconic-so deployment --dir ${deployment_dir_name} ps
#laconic-so deployment --dir ${deployment_dir_name} stop

View File

@ -137,7 +137,7 @@ fi
 echo "**************************************************************************************"
 echo "Installing laconic-so"
 # install latest `laconic-so`
-distribution_url=https://git.vdb.to/cerc-io/stack-orchestrator/releases/download/latest/laconic-so
+distribution_url=https://github.com/cerc-io/stack-orchestrator/releases/latest/download/laconic-so
 install_filename=${install_dir}/laconic-so
 mkdir -p ${install_dir}
 curl -L -o ${install_filename} ${distribution_url}

View File

@ -13,7 +13,7 @@ setup(
     description='Orchestrates deployment of the Laconic stack',
     long_description=long_description,
     long_description_content_type="text/markdown",
-    url='https://git.vdb.to/cerc-io/stack-orchestrator',
+    url='https://github.com/cerc-io/stack-orchestrator',
     py_modules=['stack_orchestrator'],
     packages=find_packages(),
     install_requires=[requirements],

View File

@ -27,13 +27,12 @@ import subprocess
 import click
 import importlib.resources
 from pathlib import Path
-from stack_orchestrator.util import include_exclude_check, get_parsed_stack_config, stack_is_external, warn_exit
+from stack_orchestrator.util import include_exclude_check, get_parsed_stack_config, stack_is_external
 from stack_orchestrator.base import get_npm_registry_url
 # TODO: find a place for this
 # epilog="Config provided either in .env or settings.ini or env vars: CERC_REPO_BASE_DIR (defaults to ~/cerc)"
 def make_container_build_env(dev_root_path: str,
                              container_build_dir: str,
                              debug: bool,
@ -105,9 +104,6 @@ def process_container(stack: str,
             build_command = os.path.join(container_build_dir,
                                          "default-build.sh") + f" {default_container_tag} {repo_dir_or_build_dir}"
     if not dry_run:
-        # No PATH at all causes failures with podman.
-        if "PATH" not in container_build_env:
-            container_build_env["PATH"] = os.environ["PATH"]
         if verbose:
             print(f"Executing: {build_command} with environment: {container_build_env}")
         build_result = subprocess.run(build_command, shell=True, env=container_build_env)
@ -123,7 +119,6 @@
     else:
         print("Skipped")
 @click.command()
 @click.option('--include', help="only build these containers")
 @click.option('--exclude', help="don\'t build these containers")
@ -164,8 +159,6 @@ def command(ctx, include, exclude, force_rebuild, extra_build_args):
     containers_in_scope = []
     if stack:
         stack_config = get_parsed_stack_config(stack)
-        if "containers" not in stack_config or stack_config["containers"] is None:
-            warn_exit(f"stack {stack} does not define any containers")
         containers_in_scope = stack_config['containers']
     else:
         containers_in_scope = all_containers

View File

@ -25,11 +25,10 @@ from decouple import config
 import click
 from pathlib import Path
 from stack_orchestrator.build import build_containers
-from stack_orchestrator.deploy.webapp.util import determine_base_container
 @click.command()
-@click.option('--base-container')
+@click.option('--base-container', default="cerc/nextjs-base")
 @click.option('--source-repo', help="directory containing the webapp to build", required=True)
 @click.option("--force-rebuild", is_flag=True, default=False, help="Override dependency checking -- always rebuild")
 @click.option("--extra-build-args", help="Supply extra arguments to build")
@ -58,9 +57,6 @@ def command(ctx, base_container, source_repo, force_rebuild, extra_build_args, t
     if not quiet:
         print(f'Dev Root is: {dev_root_path}')
-    if not base_container:
-        base_container = determine_base_container(source_repo)
     # First build the base container.
     container_build_env = build_containers.make_container_build_env(dev_root_path, container_build_dir, debug,
                                                                     force_rebuild, extra_build_args)
@ -68,6 +64,7 @@ def command(ctx, base_container, source_repo, force_rebuild, extra_build_args, t
     build_containers.process_container(None, base_container, container_build_dir, container_build_env, dev_root_path, quiet,
                                        verbose, dry_run, continue_on_error)
     # Now build the target webapp. We use the same build script, but with a different Dockerfile and work dir.
     container_build_env["CERC_WEBAPP_BUILD_RUNNING"] = "true"
     container_build_env["CERC_CONTAINER_BUILD_WORK_DIR"] = os.path.abspath(source_repo)

View File

@ -27,12 +27,6 @@ kube_config_key = "kube-config"
 deploy_to_key = "deploy-to"
 network_key = "network"
 http_proxy_key = "http-proxy"
-image_registry_key = "image-registry"
-configmaps_key = "configmaps"
-resources_key = "resources"
-volumes_key = "volumes"
-security_key = "security"
-annotations_key = "annotations"
-labels_key = "labels"
+image_resigtry_key = "image-registry"
 kind_config_filename = "kind-config.yml"
 kube_config_filename = "kubeconfig.yml"
View File

@ -1,13 +0,0 @@
services:
registry:
image: registry:2.8
restart: always
environment:
REGISTRY_LOG_LEVEL: ${REGISTRY_LOG_LEVEL}
volumes:
- registry-data:/var/lib/registry
ports:
- "5000"
volumes:
registry-data:

View File

@ -1,36 +0,0 @@
version: '3.7'
services:
# Runs an Urbit fake ship and attempts an app installation using given data
# Uploads the app glob to given IPFS endpoint
# From urbit_app_builds volume:
# - takes app build from ${CERC_URBIT_APP}/build (waits for it to appear)
# - takes additional mark files from ${CERC_URBIT_APP}/mar
# - takes the docket file from ${CERC_URBIT_APP}/desk.docket-0
urbit-fake-ship:
restart: unless-stopped
image: tloncorp/vere
environment:
CERC_SCRIPT_DEBUG: ${CERC_SCRIPT_DEBUG}
CERC_URBIT_APP: ${CERC_URBIT_APP}
CERC_ENABLE_APP_INSTALL: ${CERC_ENABLE_APP_INSTALL:-true}
CERC_IPFS_GLOB_HOST_ENDPOINT: ${CERC_IPFS_GLOB_HOST_ENDPOINT:-http://ipfs:5001}
CERC_IPFS_SERVER_ENDPOINT: ${CERC_IPFS_SERVER_ENDPOINT:-http://ipfs:8080}
entrypoint: ["bash", "-c", "./run-urbit-ship.sh && ./deploy-app.sh && tail -f /dev/null"]
volumes:
- urbit_data:/urbit
- urbit_app_builds:/app-builds
- ../config/urbit/run-urbit-ship.sh:/urbit/run-urbit-ship.sh
- ../config/urbit/deploy-app.sh:/urbit/deploy-app.sh
ports:
- "80"
healthcheck:
test: ["CMD", "nc", "-v", "localhost", "80"]
interval: 20s
timeout: 5s
retries: 15
start_period: 10s
volumes:
urbit_data:
urbit_app_builds:

View File

@ -1,23 +0,0 @@
version: "3.7"
services:
grafana:
image: grafana/grafana:10.2.2
restart: always
environment:
GF_SERVER_ROOT_URL: ${GF_SERVER_ROOT_URL}
volumes:
- ../config/monitoring/grafana/provisioning:/etc/grafana/provisioning
- ../config/monitoring/grafana/dashboards:/etc/grafana/dashboards
- grafana_storage:/var/lib/grafana
ports:
- "3000"
healthcheck:
test: ["CMD", "nc", "-vz", "localhost", "3000"]
interval: 30s
timeout: 5s
retries: 10
start_period: 3s
volumes:
grafana_storage:

View File

@ -1,24 +1,13 @@
 version: "3.2"
 # See: https://docs.ipfs.tech/install/run-ipfs-inside-docker/#set-up
 services:
   ipfs:
-    image: ipfs/kubo:v0.24.0
+    image: ipfs/kubo:master-2023-02-20-714a968
     restart: always
     volumes:
-      - ipfs-import:/import
-      - ipfs-data:/data/ipfs
+      - ./ipfs/import:/import
+      - ./ipfs/data:/data/ipfs
     ports:
-      - "4001"
-      - "8080"
+      - "0.0.0.0:8080:8080"
+      - "0.0.0.0:4001:4001"
       - "0.0.0.0:5001:5001"
-    healthcheck:
-      test: ["CMD", "nc", "-v", "localhost", "5001"]
-      interval: 20s
-      timeout: 5s
-      retries: 15
-      start_period: 10s
-volumes:
-  ipfs-import:
-  ipfs-data:

View File

@ -1,12 +0,0 @@
version: "3.2"
services:
mars:
image: cerc/mars-v2:local
restart: always
ports:
- "3000:3000"
environment:
- URL_OSMOSIS_REST=https://lcd-osmosis.blockapsis.com
- URL_OSMOSIS_RPC=https://rpc-osmosis.blockapsis.com
- WALLET_CONNECT_ID=0x0x0x0x0x0x0x0x0x0x0x0x0x0x0x0x0x0x0x0x

View File

@ -1,20 +0,0 @@
version: "3.2"
services:
mars:
image: cerc/mars:local
restart: always
ports:
- "3000:3000"
environment:
- URL_OSMOSIS_GQL=https://osmosis-node.marsprotocol.io/GGSFGSFGFG34/osmosis-hive-front/graphql
- URL_OSMOSIS_REST=https://lcd-osmosis.blockapsis.com
- URL_OSMOSIS_RPC=https://rpc-osmosis.blockapsis.com
- URL_NEUTRON_GQL=https://neutron.rpc.p2p.world/qgrnU6PsQZA8F9S5Fb8Fn3tV3kXmMBl2M9bcc9jWLjQy8p/hive/graphql
- URL_NEUTRON_REST=https://rest-kralum.neutron-1.neutron.org
- URL_NEUTRON_RPC=https://rpc-kralum.neutron-1.neutron.org
- URL_NEUTRON_TEST_GQL=https://testnet-neutron-gql.marsprotocol.io/graphql
- URL_NEUTRON_TEST_REST=https://rest-palvus.pion-1.ntrn.tech
- URL_NEUTRON_TEST_RPC=https://rpc-palvus.pion-1.ntrn.tech
- WALLET_CONNECT_ID=0x0x0x0x0x0x0x0x0x0x0x0x0x0x0x0x0x0x0x0x

View File

@ -1,15 +0,0 @@
version: '3.8'
services:
node-exporter:
image: prom/node-exporter:latest
restart: unless-stopped
command:
- '--web.listen-address=:9100'
- '--path.rootfs=/host'
- '--collector.systemd'
- '--collector.processes'
network_mode: host
pid: host
volumes:
- '/:/host:ro,rslave'

View File

@ -1,22 +0,0 @@
version: "3.2"
services:
osmosis-front-end:
image: cerc/osmosis-front-end-urbit:local
restart: on-failure
environment:
- NEXT_PUBLIC_WEB_API_BASE_URL=${CERC_WEB_API_BASE_URL}
- ASSET_LIST_COMMIT_HASH=a326bcefc51372b4912be5a2a2fa84a5d142a438
- NEXT_PUBLIC_BASEPATH=/apps/osmosis
- NEXT_PUBLIC_URBIT_DEPLOYMENT=true
working_dir: /app/packages/web
command: ["./build-app-for-urbit.sh"]
volumes:
- ../config/osmosis/build-app-for-urbit.sh:/app/packages/web/build-app-for-urbit.sh
- ../config/osmosis/.env.production:/app/packages/web/.env.production
- urbit_app_builds:/app-builds
- ../config/osmosis/urbit-files/mar:/app/packages/web/mar
- ../config/osmosis/urbit-files/desk.docket-0:/app/packages/web/desk.docket-0
volumes:
urbit_app_builds:

View File

@ -1,36 +0,0 @@
version: "3.2"
services:
osmosis-front-end:
image: cerc/osmosis-front-end:local
restart: on-failure
environment:
- NEXT_PUBLIC_WEB_API_BASE_URL=${CERC_WEB_API_BASE_URL}
- ASSET_LIST_COMMIT_HASH=a326bcefc51372b4912be5a2a2fa84a5d142a438
working_dir: /app/packages/web
command: ["./build-app.sh"]
volumes:
- ../config/osmosis/build-app.sh:/app/packages/web/build-app.sh
- ../config/osmosis/.env.production:/app/packages/web/.env.production
- app_builds:/app-builds
nginx:
image: nginx:1.23-alpine
restart: always
depends_on:
osmosis-front-end:
condition: service_completed_successfully
volumes:
- ../config/osmosis/nginx:/etc/nginx/conf.d
- app_builds:/usr/share/nginx
ports:
- "80"
healthcheck:
test: ["CMD", "nc", "-vz", "localhost", "80"]
interval: 20s
timeout: 5s
retries: 15
start_period: 5s
volumes:
app_builds:

View File

@ -1,8 +0,0 @@
version: "3.2"
services:
ping-pub:
image: cerc/ping-pub:local
restart: always
ports:
- "5173:5173"

View File

@ -1,57 +0,0 @@
version: "3.7"
services:
prometheus:
image: prom/prometheus:v2.49.1
restart: always
volumes:
- ../config/monitoring/prometheus:/etc/prometheus
- prometheus_data:/prometheus
ports:
- "9090"
healthcheck:
test: ["CMD", "nc", "-vz", "localhost", "9090"]
interval: 30s
timeout: 5s
retries: 10
start_period: 3s
extra_hosts:
- "host.docker.internal:host-gateway"
blackbox:
image: prom/blackbox-exporter:latest
restart: always
volumes:
- ../config/monitoring/blackbox.yml:/etc/blackbox_exporter/config.yml
ports:
- '9115'
extra_hosts:
- "host.docker.internal:host-gateway"
chain-head-exporter:
image: cerc/watcher-ts:local
restart: always
working_dir: /app/packages/cli
environment:
ETH_RPC_ENDPOINT: ${CERC_ETH_RPC_ENDPOINT}
FIL_RPC_ENDPOINT: ${CERC_FIL_RPC_ENDPOINT}
ETH_RPC_API_KEY: ${CERC_INFURA_KEY}
PORT: ${CERC_METRICS_PORT}
command: ["sh", "-c", "yarn export-metrics:chain-heads"]
ports:
- '5000'
extra_hosts:
- "host.docker.internal:host-gateway"
postgres-exporter:
image: quay.io/prometheuscommunity/postgres-exporter
restart: always
volumes:
- ../config/monitoring/postgres-exporter.yml:/postgres_exporter.yml
ports:
- '9187'
extra_hosts:
- "host.docker.internal:host-gateway"
volumes:
prometheus_data:

View File

@ -6,7 +6,7 @@ services:
     restart: on-failure
     working_dir: /app/packages/cli
     environment:
-      ENABLE_PROXY: ${CERC_ENABLE_PROXY:-true}
+      ENABLE_PROXY: ${ENABLE_PROXY:-true}
       PROXY_UPSTREAM: ${CERC_PROXY_UPSTREAM}
       PROXY_ORIGIN_HEADER: ${CERC_PROXY_ORIGIN_HEADER}
     command: ["sh", "-c", "./run.sh"]

View File

@ -1,20 +0,0 @@
services:
database:
image: cerc/test-database-container:local
restart: always
volumes:
- db-data:/var/lib/postgresql/data
environment:
POSTGRES_USER: "test-user"
POSTGRES_DB: "test-db"
POSTGRES_PASSWORD: "password"
POSTGRES_INITDB_ARGS: "-E UTF8 --locale=C"
ports:
- "5432"
test-client:
image: cerc/test-database-client:local
volumes:
db-data:

View File

@ -5,15 +5,10 @@ services:
     environment:
       CERC_SCRIPT_DEBUG: ${CERC_SCRIPT_DEBUG}
       CERC_TEST_PARAM_1: ${CERC_TEST_PARAM_1:-FAILED}
-      CERC_TEST_PARAM_2: "CERC_TEST_PARAM_2_VALUE"
     volumes:
-      - test-data-bind:/data
-      - test-data-auto:/data2
-      - test-config:/config:ro
+      - test-data:/data
     ports:
       - "80"
 volumes:
-  test-data-bind:
-  test-data-auto:
-  test-config:
+  test-data:

View File

@ -9,10 +9,9 @@ services:
       - REACT_APP_AWS_API_ENDPOINT=${CERC_UNISWAP_GQL}
     command: ["./build-app.sh"]
     volumes:
+      - app_builds:/app-builds
       - ../config/uniswap-interface/build-app.sh:/app/build-app.sh
-      - urbit_app_builds:/app-builds
-      - ../config/uniswap-interface/urbit-files/mar:/app/mar
-      - ../config/uniswap-interface/urbit-files/desk.docket-0:/app/desk.docket-0
 volumes:
-  urbit_app_builds:
-  app_globs:
+  app_builds:

View File

@ -0,0 +1,46 @@
version: '3.7'
services:
urbit-fake-ship:
restart: unless-stopped
image: tloncorp/vere
environment:
CERC_IPFS_GLOB_HOST_ENDPOINT: ${CERC_IPFS_GLOB_HOST_ENDPOINT:-http://ipfs-glob-host:5001}
CERC_IPFS_SERVER_ENDPOINT: ${CERC_IPFS_SERVER_ENDPOINT:-http://ipfs-glob-host:8080}
entrypoint: ["bash", "-c", "./run-urbit-ship.sh && ./deploy-uniswap-app.sh && tail -f /dev/null"]
volumes:
- urbit_data:/urbit
- app_builds:/app-builds
- app_globs:/app-globs
- ../config/urbit/run-urbit-ship.sh:/urbit/run-urbit-ship.sh
- ../config/uniswap-interface/deploy-uniswap-app.sh:/urbit/deploy-uniswap-app.sh
ports:
- "80"
healthcheck:
test: ["CMD", "nc", "-v", "localhost", "80"]
interval: 20s
timeout: 5s
retries: 15
start_period: 10s
ipfs-glob-host:
image: ipfs/kubo:master-2023-02-20-714a968
volumes:
- ipfs-import:/import
- ipfs-data:/data/ipfs
ports:
- "8080"
- "5001"
healthcheck:
test: ["CMD", "nc", "-v", "localhost", "5001"]
interval: 20s
timeout: 5s
retries: 15
start_period: 10s
volumes:
urbit_data:
app_builds:
app_globs:
ipfs-import:
ipfs-data:

View File

@ -35,7 +35,7 @@ services:
       - ../config/watcher-merkl-sushiswap-v3/watcher-config-template.toml:/app/environments/watcher-config-template.toml
       - ../config/watcher-merkl-sushiswap-v3/start-job-runner.sh:/app/start-job-runner.sh
     ports:
-      - "9002:9000"
+      - "9000"
     healthcheck:
       test: ["CMD", "nc", "-v", "localhost", "9000"]
       interval: 20s
@ -62,7 +62,7 @@ services:
       - ../config/watcher-merkl-sushiswap-v3/start-server.sh:/app/start-server.sh
     ports:
       - "127.0.0.1:3007:3008"
-      - "9003:9001"
+      - "9001"
     healthcheck:
       test: ["CMD", "nc", "-v", "localhost", "3008"]
       interval: 20s

View File

@ -35,7 +35,7 @@ services:
       - ../config/watcher-sushiswap-v3/watcher-config-template.toml:/app/environments/watcher-config-template.toml
       - ../config/watcher-sushiswap-v3/start-job-runner.sh:/app/start-job-runner.sh
     ports:
-      - "9000:9000"
+      - "9000"
     healthcheck:
       test: ["CMD", "nc", "-v", "localhost", "9000"]
       interval: 20s
@ -62,7 +62,7 @@ services:
       - ../config/watcher-sushiswap-v3/start-server.sh:/app/start-server.sh
     ports:
       - "127.0.0.1:3008:3008"
-      - "9001:9001"
+      - "9001"
     healthcheck:
       test: ["CMD", "nc", "-v", "localhost", "3008"]
       interval: 20s

View File

@ -5,4 +5,4 @@ services:
     environment:
       CERC_SCRIPT_DEBUG: ${CERC_SCRIPT_DEBUG}
     ports:
-      - "80"
+      - "3000"

View File

@ -1,7 +0,0 @@
modules:
http_2xx:
prober: http
timeout: 5s
http:
valid_status_codes: [] #default to 2xx
method: GET

View File

@ -1,943 +0,0 @@
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "datasource",
"uid": "grafana"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"description": "node.js prometheus client basic metrics",
"editable": true,
"fiscalYearStartMonth": 0,
"gnetId": 11159,
"graphTooltip": 0,
"id": 15,
"links": [],
"liveNow": false,
"panels": [
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 7,
"w": 10,
"x": 0,
"y": 0
},
"hiddenSeries": false,
"id": 6,
"legend": {
"alignAsTable": true,
"avg": true,
"current": true,
"max": true,
"min": true,
"show": true,
"total": false,
"values": true
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"alertThreshold": true
},
"paceLength": 10,
"percentage": false,
"pluginVersion": "10.2.2",
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"expr": "irate(process_cpu_user_seconds_total{instance=~\"$instance\"}[2m]) * 100",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "User CPU - {{instance}}",
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"expr": "irate(process_cpu_system_seconds_total{instance=~\"$instance\"}[2m]) * 100",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "Sys CPU - {{instance}}",
"refId": "B"
}
],
"thresholds": [],
"timeRegions": [],
"title": "Process CPU Usage",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"mode": "time",
"show": true,
"values": []
},
"yaxes": [
{
"format": "percent",
"logBase": 1,
"show": true
},
{
"format": "short",
"logBase": 1,
"show": true
}
],
"yaxis": {
"align": false
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 7,
"w": 9,
"x": 10,
"y": 0
},
"hiddenSeries": false,
"id": 8,
"legend": {
"alignAsTable": true,
"avg": true,
"current": true,
"max": true,
"min": true,
"show": true,
"total": false,
"values": true
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"alertThreshold": true
},
"paceLength": 10,
"percentage": false,
"pluginVersion": "10.2.2",
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"expr": "nodejs_eventloop_lag_seconds{instance=~\"$instance\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "{{instance}}",
"refId": "A"
}
],
"thresholds": [],
"timeRegions": [],
"title": "Event Loop Lag",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"mode": "time",
"show": true,
"values": []
},
"yaxes": [
{
"format": "s",
"logBase": 1,
"show": true
},
{
"format": "short",
"logBase": 1,
"show": true
}
],
"yaxis": {
"align": false
}
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fieldConfig": {
"defaults": {
"color": {
"fixedColor": "text",
"mode": "fixed"
},
"mappings": [
{
"options": {
"match": "null",
"result": {
"text": "N/A"
}
},
"type": "special"
}
],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
}
]
},
"unit": "none"
},
"overrides": []
},
"gridPos": {
"h": 3,
"w": 5,
"x": 19,
"y": 0
},
"id": 2,
"interval": "",
"links": [],
"maxDataPoints": 100,
"options": {
"colorMode": "none",
"graphMode": "none",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": [
"mean"
],
"fields": "",
"values": false
},
"textMode": "name",
"wideLayout": true
},
"pluginVersion": "10.2.2",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"expr": "sum(nodejs_version_info{instance=~\"$instance\"}) by (version)",
"format": "time_series",
"instant": false,
"interval": "",
"intervalFactor": 1,
"legendFormat": "{{version}}",
"refId": "A"
}
],
"title": "Node.js Version",
"type": "stat"
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fieldConfig": {
"defaults": {
"color": {
"fixedColor": "#F2495C",
"mode": "fixed"
},
"mappings": [
{
"options": {
"match": "null",
"result": {
"text": "N/A"
}
},
"type": "special"
}
],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "none"
},
"overrides": []
},
"gridPos": {
"h": 4,
"w": 5,
"x": 19,
"y": 3
},
"id": 4,
"links": [],
"maxDataPoints": 100,
"options": {
"colorMode": "none",
"graphMode": "area",
"justifyMode": "auto",
"orientation": "horizontal",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"textMode": "auto",
"wideLayout": true
},
"pluginVersion": "10.2.2",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"expr": "sum(changes(process_start_time_seconds{instance=~\"$instance\"}[1m]))",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "{{instance}}",
"refId": "A"
}
],
"title": "Process Restart Times",
"type": "stat"
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 7,
"w": 16,
"x": 0,
"y": 7
},
"hiddenSeries": false,
"id": 7,
"legend": {
"alignAsTable": true,
"avg": true,
"current": true,
"max": true,
"min": true,
"rightSide": true,
"show": true,
"total": false,
"values": true
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"alertThreshold": true
},
"paceLength": 10,
"percentage": false,
"pluginVersion": "10.2.2",
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"expr": "process_resident_memory_bytes{instance=~\"$instance\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "Process Memory - {{instance}}",
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"expr": "nodejs_heap_size_total_bytes{instance=~\"$instance\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "Heap Total - {{instance}}",
"refId": "B"
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"expr": "nodejs_heap_size_used_bytes{instance=~\"$instance\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "Heap Used - {{instance}}",
"refId": "C"
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"expr": "nodejs_external_memory_bytes{instance=~\"$instance\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "External Memory - {{instance}}",
"refId": "D"
}
],
"thresholds": [],
"timeRegions": [],
"title": "Process Memory Usage",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"mode": "time",
"show": true,
"values": []
},
"yaxes": [
{
"format": "bytes",
"logBase": 1,
"show": true
},
{
"format": "short",
"logBase": 1,
"show": true
}
],
"yaxis": {
"align": false
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 7,
"w": 8,
"x": 16,
"y": 7
},
"hiddenSeries": false,
"id": 9,
"legend": {
"alignAsTable": true,
"avg": true,
"current": true,
"max": true,
"min": true,
"show": true,
"total": false,
"values": true
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"alertThreshold": true
},
"paceLength": 10,
"percentage": false,
"pluginVersion": "10.2.2",
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"expr": "nodejs_active_handles_total{instance=~\"$instance\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "Active Handler - {{instance}}",
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"expr": "nodejs_active_requests_total{instance=~\"$instance\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "Active Request - {{instance}}",
"refId": "B"
}
],
"thresholds": [],
"timeRegions": [],
"title": "Active Handlers/Requests Total",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"mode": "time",
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"logBase": 1,
"show": true
},
{
"format": "short",
"logBase": 1,
"show": true
}
],
"yaxis": {
"align": false
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 8,
"w": 8,
"x": 0,
"y": 14
},
"hiddenSeries": false,
"id": 10,
"legend": {
"alignAsTable": true,
"avg": true,
"current": true,
"max": true,
"min": true,
"rightSide": false,
"show": true,
"total": false,
"values": true
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"alertThreshold": true
},
"paceLength": 10,
"percentage": false,
"pluginVersion": "10.2.2",
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"expr": "nodejs_heap_space_size_total_bytes{instance=~\"$instance\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "Heap Total - {{instance}} - {{space}}",
"refId": "A"
}
],
"thresholds": [],
"timeRegions": [],
"title": "Heap Total Detail",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"mode": "time",
"show": true,
"values": []
},
"yaxes": [
{
"format": "bytes",
"logBase": 1,
"show": true
},
{
"format": "short",
"logBase": 1,
"show": true
}
],
"yaxis": {
"align": false
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 8,
"w": 8,
"x": 8,
"y": 14
},
"hiddenSeries": false,
"id": 11,
"legend": {
"alignAsTable": true,
"avg": true,
"current": true,
"max": true,
"min": true,
"rightSide": false,
"show": true,
"total": false,
"values": true
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"alertThreshold": true
},
"paceLength": 10,
"percentage": false,
"pluginVersion": "10.2.2",
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"expr": "nodejs_heap_space_size_used_bytes{instance=~\"$instance\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "Heap Used - {{instance}} - {{space}}",
"refId": "A"
}
],
"thresholds": [],
"timeRegions": [],
"title": "Heap Used Detail",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"mode": "time",
"show": true,
"values": []
},
"yaxes": [
{
"format": "bytes",
"logBase": 1,
"show": true
},
{
"format": "short",
"logBase": 1,
"show": true
}
],
"yaxis": {
"align": false
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 8,
"w": 8,
"x": 16,
"y": 14
},
"hiddenSeries": false,
"id": 12,
"legend": {
"alignAsTable": true,
"avg": true,
"current": true,
"max": true,
"min": true,
"rightSide": false,
"show": true,
"total": false,
"values": true
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"alertThreshold": true
},
"paceLength": 10,
"percentage": false,
"pluginVersion": "10.2.2",
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"expr": "nodejs_heap_space_size_available_bytes{instance=~\"$instance\"}",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "Heap Used - {{instance}} - {{space}}",
"refId": "A"
}
],
"thresholds": [],
"timeRegions": [],
"title": "Heap Available Detail",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"mode": "time",
"show": true,
"values": []
},
"yaxes": [
{
"format": "bytes",
"logBase": 1,
"show": true
},
{
"format": "short",
"logBase": 1,
"show": true
}
],
"yaxis": {
"align": false
}
}
],
"refresh": "10s",
"schemaVersion": 38,
"tags": [
"nodejs"
],
"templating": {
"list": [
{
"current": {
"selected": true,
"text": [
"All"
],
"value": [
"$__all"
]
},
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"definition": "label_values(nodejs_version_info, instance)",
"hide": 0,
"includeAll": true,
"label": "instance",
"multi": true,
"name": "instance",
"options": [],
"query": "label_values(nodejs_version_info, instance)",
"refresh": 1,
"regex": "",
"skipUrlSync": false,
"sort": 1,
"tagValuesQuery": "",
"tagsQuery": "",
"type": "query",
"useTags": false
}
]
},
"time": {
"from": "now-15m",
"to": "now"
},
"timepicker": {
"refresh_intervals": [
"5s",
"10s",
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
],
"time_options": [
"5m",
"15m",
"1h",
"6h",
"12h",
"24h",
"2d",
"7d",
"30d"
]
},
"timezone": "",
"title": "NodeJS Application Dashboard",
"uid": "PTSqcpJWk",
"version": 3,
"weekStart": ""
}

View File

@ -1,14 +0,0 @@
# https://www.clever-cloud.com/blog/features/2021/12/03/slack-alerts-for-grafana/
apiVersion: 1
contactPoints:
- orgId: 1
name: SlackNotifier
receivers:
- uid: a71b06e3-58b6-41fe-af65-fbbb29653951
type: slack
settings:
# Slack hook URL (see https://api.slack.com/messaging/webhooks)
url: <YOUR_SLACK_HOOK_URL>
disableResolveMessage: false

View File

@ -1,15 +0,0 @@
# https://grafana.com/docs/grafana/latest/alerting/alerting-rules/create-notification-policy/
apiVersion: 1
policies:
- orgId: 1
receiver: grafana-default-email
group_by:
- grafana_folder
- alertname
routes:
- receiver: SlackNotifier
object_matchers:
# Add matchers below
# - ['grafana_folder', '=', 'MyAlerts']

View File

@ -1,10 +0,0 @@
apiVersion: 1
providers:
- name: dashboards
type: file
updateIntervalSeconds: 10
allowUiUpdates: true
options:
path: /etc/grafana/dashboards
foldersFromFilesStructure: true

View File

@ -1,16 +0,0 @@
apiVersion: 1
datasources:
- id: 1
orgId: 1
name: Prometheus
type: prometheus
typeName: Prometheus
typeLogoUrl: public/app/plugins/datasource/prometheus/img/prometheus_logo.svg
access: proxy
url: http://prometheus:9090
isDefault: true
jsonData:
httpMethod: POST
version: 1
editable: true

View File

@ -1,8 +0,0 @@
auth_modules:
foo:
type: userpass
userpass:
username: username
password: password
options:
sslmode: disable

View File

@ -1,67 +0,0 @@
global:
scrape_interval: 10s
evaluation_interval: 15s
rule_files:
# - "first.rules"
# - "second.rules"
scrape_configs:
- job_name: prometheus
static_configs:
- targets: ['localhost:9090']
- job_name: node
static_configs:
# Add node-exporter targets to be monitored below
# - targets: ['example-host:9100']
# labels:
# instance: 'my-host'
- job_name: 'blackbox'
scrape_interval: 10s
metrics_path: /probe
params:
module: [http_2xx]
static_configs:
# Add URLs to be monitored below
- targets:
# - https://github.com
relabel_configs:
- source_labels: [__address__]
regex: (.*)(:80)?
target_label: __param_target
- source_labels: [__param_target]
regex: (.*)
target_label: instance
replacement: ${1}
- source_labels: []
regex: .*
target_label: __address__
replacement: blackbox:9115
- job_name: chain_heads
scrape_interval: 10s
metrics_path: /metrics
scheme: http
static_configs:
- targets: ['chain-head-exporter:5000']
- job_name: 'postgres'
scrape_interval: 30s
scrape_timeout: 30s
static_configs:
# Add DB targets below
# - targets: [example-server:5432]
# labels:
# instance: 'example-label'
metrics_path: /probe
params:
auth_module: [foo]
relabel_configs:
- source_labels: [__address__]
target_label: __param_target
- source_labels: [__param_target]
target_label: instance
- target_label: __address__
replacement: postgres-exporter:9187

View File

@ -1,933 +0,0 @@
# https://grafana.com/docs/grafana/latest/alerting/alerting-rules/create-grafana-managed-rule/
apiVersion: 1
groups:
- orgId: 1
name: watcher
folder: WatcherAlerts
interval: 30s
rules:
# Azimuth
- uid: azimuth_diff_external
title: azimuth_watcher_head_tracking
condition: condition
data:
- refId: diff
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
disableTextWrap: false
editorMode: code
expr: latest_block_number - on(chain) group_right sync_status_block_number{job="azimuth", instance="azimuth", kind="latest_indexed"}
fullMetaSearch: false
includeNullMetadata: true
instant: true
intervalMs: 1000
legendFormat: __auto
maxDataPoints: 43200
range: false
refId: diff
useBackend: false
- refId: latest_external
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: latest_block_number{chain="ethereum"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_external
- refId: latest_indexed
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: sync_status_block_number{job="azimuth", instance="azimuth", kind="latest_indexed"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_indexed
- refId: condition
relativeTimeRange:
from: 600
to: 0
datasourceUid: __expr__
model:
conditions:
- evaluator:
params:
- 0
- 0
type: gt
operator:
type: and
query:
params: []
reducer:
params: []
type: avg
type: query
datasource:
name: Expression
type: __expr__
uid: __expr__
expression: ${diff} >= 16
intervalMs: 1000
maxDataPoints: 43200
refId: condition
type: math
noDataState: Alerting
execErrState: Alerting
for: 15m
annotations:
summary: Watcher {{ index $labels "instance" }} of group {{ index $labels "job" }} is falling behind external head by {{ index $values "diff" }}
isPaused: false
- uid: censures_diff_external
title: censures_watcher_head_tracking
condition: condition
data:
- refId: diff
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
disableTextWrap: false
editorMode: code
expr: latest_block_number - on(chain) group_right sync_status_block_number{job="azimuth", instance="censures", kind="latest_indexed"}
fullMetaSearch: false
includeNullMetadata: true
instant: true
intervalMs: 1000
legendFormat: __auto
maxDataPoints: 43200
range: false
refId: diff
useBackend: false
- refId: latest_external
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: latest_block_number{chain="ethereum"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_external
- refId: latest_indexed
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: sync_status_block_number{job="azimuth", instance="censures", kind="latest_indexed"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_indexed
- refId: condition
relativeTimeRange:
from: 600
to: 0
datasourceUid: __expr__
model:
conditions:
- evaluator:
params:
- 0
- 0
type: gt
operator:
type: and
query:
params: []
reducer:
params: []
type: avg
type: query
datasource:
name: Expression
type: __expr__
uid: __expr__
expression: ${diff} >= 16
intervalMs: 1000
maxDataPoints: 43200
refId: condition
type: math
noDataState: Alerting
execErrState: Alerting
for: 15m
annotations:
summary: Watcher {{ index $labels "instance" }} of group {{ index $labels "job" }} is falling behind external head by {{ index $values "diff" }}
isPaused: false
- uid: claims_diff_external
title: claims_watcher_head_tracking
condition: condition
data:
- refId: diff
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
disableTextWrap: false
editorMode: code
expr: latest_block_number - on(chain) group_right sync_status_block_number{job="azimuth", instance="claims", kind="latest_indexed"}
fullMetaSearch: false
includeNullMetadata: true
instant: true
intervalMs: 1000
legendFormat: __auto
maxDataPoints: 43200
range: false
refId: diff
useBackend: false
- refId: latest_external
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: latest_block_number{chain="ethereum"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_external
- refId: latest_indexed
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: sync_status_block_number{job="azimuth", instance="claims", kind="latest_indexed"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_indexed
- refId: condition
relativeTimeRange:
from: 600
to: 0
datasourceUid: __expr__
model:
conditions:
- evaluator:
params:
- 0
- 0
type: gt
operator:
type: and
query:
params: []
reducer:
params: []
type: avg
type: query
datasource:
name: Expression
type: __expr__
uid: __expr__
expression: ${diff} >= 16
intervalMs: 1000
maxDataPoints: 43200
refId: condition
type: math
noDataState: Alerting
execErrState: Alerting
for: 15m
annotations:
summary: Watcher {{ index $labels "instance" }} of group {{ index $labels "job" }} is falling behind external head by {{ index $values "diff" }}
isPaused: false
- uid: conditional_star_release_diff_external
title: conditional_star_release_watcher_head_tracking
condition: condition
data:
- refId: diff
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
disableTextWrap: false
editorMode: code
expr: latest_block_number - on(chain) group_right sync_status_block_number{job="azimuth", instance="conditional_star_release", kind="latest_indexed"}
fullMetaSearch: false
includeNullMetadata: true
instant: true
intervalMs: 1000
legendFormat: __auto
maxDataPoints: 43200
range: false
refId: diff
useBackend: false
- refId: latest_external
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: latest_block_number{chain="ethereum"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_external
- refId: latest_indexed
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: sync_status_block_number{job="azimuth", instance="conditional_star_release", kind="latest_indexed"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_indexed
- refId: condition
relativeTimeRange:
from: 600
to: 0
datasourceUid: __expr__
model:
conditions:
- evaluator:
params:
- 0
- 0
type: gt
operator:
type: and
query:
params: []
reducer:
params: []
type: avg
type: query
datasource:
name: Expression
type: __expr__
uid: __expr__
expression: ${diff} >= 16
intervalMs: 1000
maxDataPoints: 43200
refId: condition
type: math
noDataState: Alerting
execErrState: Alerting
for: 15m
annotations:
summary: Watcher {{ index $labels "instance" }} of group {{ index $labels "job" }} is falling behind external head by {{ index $values "diff" }}
isPaused: false
- uid: delegated_sending_diff_external
title: delegated_sending_watcher_head_tracking
condition: condition
data:
- refId: diff
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
disableTextWrap: false
editorMode: code
expr: latest_block_number - on(chain) group_right sync_status_block_number{job="azimuth", instance="delegated_sending", kind="latest_indexed"}
fullMetaSearch: false
includeNullMetadata: true
instant: true
intervalMs: 1000
legendFormat: __auto
maxDataPoints: 43200
range: false
refId: diff
useBackend: false
- refId: latest_external
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: latest_block_number{chain="ethereum"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_external
- refId: latest_indexed
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: sync_status_block_number{job="azimuth", instance="delegated_sending", kind="latest_indexed"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_indexed
- refId: condition
relativeTimeRange:
from: 600
to: 0
datasourceUid: __expr__
model:
conditions:
- evaluator:
params:
- 0
- 0
type: gt
operator:
type: and
query:
params: []
reducer:
params: []
type: avg
type: query
datasource:
name: Expression
type: __expr__
uid: __expr__
expression: ${diff} >= 16
intervalMs: 1000
maxDataPoints: 43200
refId: condition
type: math
noDataState: Alerting
execErrState: Alerting
for: 15m
annotations:
summary: Watcher {{ index $labels "instance" }} of group {{ index $labels "job" }} is falling behind external head by {{ index $values "diff" }}
isPaused: false
- uid: ecliptic_diff_external
title: ecliptic_watcher_head_tracking
condition: condition
data:
- refId: diff
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
disableTextWrap: false
editorMode: code
expr: latest_block_number - on(chain) group_right sync_status_block_number{job="azimuth", instance="ecliptic", kind="latest_indexed"}
fullMetaSearch: false
includeNullMetadata: true
instant: true
intervalMs: 1000
legendFormat: __auto
maxDataPoints: 43200
range: false
refId: diff
useBackend: false
- refId: latest_external
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: latest_block_number{chain="ethereum"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_external
- refId: latest_indexed
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: sync_status_block_number{job="azimuth", instance="ecliptic", kind="latest_indexed"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_indexed
- refId: condition
relativeTimeRange:
from: 600
to: 0
datasourceUid: __expr__
model:
conditions:
- evaluator:
params:
- 0
- 0
type: gt
operator:
type: and
query:
params: []
reducer:
params: []
type: avg
type: query
datasource:
name: Expression
type: __expr__
uid: __expr__
expression: ${diff} >= 16
intervalMs: 1000
maxDataPoints: 43200
refId: condition
type: math
noDataState: Alerting
execErrState: Alerting
for: 15m
annotations:
summary: Watcher {{ index $labels "instance" }} of group {{ index $labels "job" }} is falling behind external head by {{ index $values "diff" }}
isPaused: false
- uid: linear_star_release_diff_external
title: linear_star_release_watcher_head_tracking
condition: condition
data:
- refId: diff
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
disableTextWrap: false
editorMode: code
expr: latest_block_number - on(chain) group_right sync_status_block_number{job="azimuth", instance="linear_star_release", kind="latest_indexed"}
fullMetaSearch: false
includeNullMetadata: true
instant: true
intervalMs: 1000
legendFormat: __auto
maxDataPoints: 43200
range: false
refId: diff
useBackend: false
- refId: latest_external
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: latest_block_number{chain="ethereum"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_external
- refId: latest_indexed
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: sync_status_block_number{job="azimuth", instance="azimuth", kind="latest_indexed"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_indexed
- refId: condition
relativeTimeRange:
from: 600
to: 0
datasourceUid: __expr__
model:
conditions:
- evaluator:
params:
- 0
- 0
type: gt
operator:
type: and
query:
params: []
reducer:
params: []
type: avg
type: query
datasource:
name: Expression
type: __expr__
uid: __expr__
expression: ${diff} >= 16
intervalMs: 1000
maxDataPoints: 43200
refId: condition
type: math
noDataState: Alerting
execErrState: Alerting
for: 15m
annotations:
summary: Watcher {{ index $labels "instance" }} of group {{ index $labels "job" }} is falling behind external head by {{ index $values "diff" }}
isPaused: false
- uid: polls_diff_external
title: polls_watcher_head_tracking
condition: condition
data:
- refId: diff
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
disableTextWrap: false
editorMode: code
expr: latest_block_number - on(chain) group_right sync_status_block_number{job="azimuth", instance="polls", kind="latest_indexed"}
fullMetaSearch: false
includeNullMetadata: true
instant: true
intervalMs: 1000
legendFormat: __auto
maxDataPoints: 43200
range: false
refId: diff
useBackend: false
- refId: latest_external
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: latest_block_number{chain="ethereum"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_external
- refId: latest_indexed
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: sync_status_block_number{job="azimuth", instance="polls", kind="latest_indexed"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_indexed
- refId: condition
relativeTimeRange:
from: 600
to: 0
datasourceUid: __expr__
model:
conditions:
- evaluator:
params:
- 0
- 0
type: gt
operator:
type: and
query:
params: []
reducer:
params: []
type: avg
type: query
datasource:
name: Expression
type: __expr__
uid: __expr__
expression: ${diff} >= 16
intervalMs: 1000
maxDataPoints: 43200
refId: condition
type: math
noDataState: Alerting
execErrState: Alerting
for: 15m
annotations:
summary: Watcher {{ index $labels "instance" }} of group {{ index $labels "job" }} is falling behind external head by {{ index $values "diff" }}
isPaused: false
# Sushi
- uid: sushiswap_diff_external
title: sushiswap_watcher_head_tracking
condition: condition
data:
- refId: diff
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
disableTextWrap: false
editorMode: code
expr: latest_block_number - on(chain) group_right sync_status_block_number{job="sushi", instance="sushiswap", kind="latest_indexed"}
fullMetaSearch: false
includeNullMetadata: true
instant: true
intervalMs: 1000
legendFormat: __auto
maxDataPoints: 43200
range: false
refId: diff
useBackend: false
- refId: latest_external
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: latest_block_number{chain="filecoin"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_external
- refId: latest_indexed
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: sync_status_block_number{job="sushi", instance="sushiswap", kind="latest_indexed"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_indexed
- refId: condition
relativeTimeRange:
from: 600
to: 0
datasourceUid: __expr__
model:
conditions:
- evaluator:
params:
- 0
- 0
type: gt
operator:
type: and
query:
params: []
reducer:
params: []
type: avg
type: query
datasource:
name: Expression
type: __expr__
uid: __expr__
expression: ${diff} >= 16
intervalMs: 1000
maxDataPoints: 43200
refId: condition
type: math
noDataState: Alerting
execErrState: Alerting
for: 15m
annotations:
summary: Watcher {{ index $labels "instance" }} of group {{ index $labels "job" }} is falling behind external head by {{ index $values "diff" }}
isPaused: false
- uid: merkl_sushiswap_diff_external
title: merkl_sushiswap_watcher_head_tracking
condition: condition
data:
- refId: diff
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
disableTextWrap: false
editorMode: code
expr: latest_block_number - on(chain) group_right sync_status_block_number{job="sushi", instance="merkl_sushiswap", kind="latest_indexed"}
fullMetaSearch: false
includeNullMetadata: true
instant: true
intervalMs: 1000
legendFormat: __auto
maxDataPoints: 43200
range: false
refId: diff
useBackend: false
- refId: latest_external
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: latest_block_number{chain="filecoin"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_external
- refId: latest_indexed
relativeTimeRange:
from: 600
to: 0
datasourceUid: PBFA97CFB590B2093
model:
datasource:
type: prometheus
uid: PBFA97CFB590B2093
editorMode: code
expr: sync_status_block_number{job="sushi", instance="merkl_sushiswap", kind="latest_indexed"}
hide: false
instant: true
legendFormat: __auto
range: false
refId: latest_indexed
- refId: condition
relativeTimeRange:
from: 600
to: 0
datasourceUid: __expr__
model:
conditions:
- evaluator:
params:
- 0
- 0
type: gt
operator:
type: and
query:
params: []
reducer:
params: []
type: avg
type: query
datasource:
name: Expression
type: __expr__
uid: __expr__
expression: ${diff} >= 16
intervalMs: 1000
maxDataPoints: 43200
refId: condition
type: math
noDataState: Alerting
execErrState: Alerting
for: 15m
annotations:
summary: Watcher {{ index $labels "instance" }} of group {{ index $labels "job" }} is falling behind external head by {{ index $values "diff" }}
isPaused: false
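
Every rule above follows the same three-query pattern: diff subtracts the watcher's latest_indexed block height from the chain's external head, latest_external and latest_indexed provide context, and the math expression ${diff} >= 16 must hold for 15 minutes (with no-data and query-error states also alerting) before the alert fires. To sanity-check one of these expressions outside Grafana, the same PromQL can be evaluated against the Prometheus HTTP API; a sketch, assuming Prometheus listens at a placeholder localhost:9090:

# Evaluate the azimuth head-tracking lag directly (endpoint is a placeholder).
curl -sG 'http://localhost:9090/api/v1/query' \
  --data-urlencode 'query=latest_block_number - on(chain) group_right sync_status_block_number{job="azimuth", instance="azimuth", kind="latest_indexed"}'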

View File

@ -1,3 +0,0 @@
NEXT_PUBLIC_COINGECKO_URL=https://api.coingecko.com
NEXT_PUBLIC_ENABLE_FEATURES=true

View File

@ -1,22 +0,0 @@
#!/bin/bash
set -e
if [ -n "$CERC_SCRIPT_DEBUG" ]; then
set -x
fi
# Check and exit if a build already exists (on restarts)
if [ -d /app-builds/osmosis/build ]; then
echo "Build already exists, remove volume to rebuild"
exit 0
fi
yarn build:static
./build-urbit.sh
# Move build to app-builds
mkdir -p /app-builds/osmosis
cp -r ./out /app-builds/osmosis/build
cp -r mar /app-builds/osmosis/
cp desk.docket-0 /app-builds/osmosis/

View File

@ -1,18 +0,0 @@
#!/bin/bash
set -e
if [ -n "$CERC_SCRIPT_DEBUG" ]; then
set -x
fi
# Check and exit if a build already exists (on restarts)
if [ -d /app-builds/osmosis/build ]; then
echo "Build already exists, remove volume to rebuild"
exit 0
fi
yarn build:static
# Move build to app-builds
mkdir -p /app-builds/osmosis
cp -r ./out /app-builds/osmosis/build

View File

@ -1,16 +0,0 @@
server {
listen 80;
listen [::]:80;
server_name _;
error_page 500 502 503 504 /50x.html;
location / {
root /usr/share/nginx/osmosis/build;
index index.html index.htm index.nginx-debian.html;
# First attempt to serve request as file, then as html,
# then as directory, then fall back to displaying a 404.
try_files $uri $uri.html $uri/ /index.html =404;
}
}
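
The try_files chain gives single-page-app behaviour: a request is served as a literal file, then as <path>.html, then as a directory, and otherwise falls back to /index.html. A quick check that client-side routes still resolve, assuming the container's port 80 is published on a hypothetical localhost:8080:

# A route with no matching file on disk should still return index.html (HTTP 200).
curl -i http://localhost:8080/some/client/route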

View File

@ -1,10 +0,0 @@
:~ title+'Osmosis'
info+'Osmosis DEX Frontend, built and maintained by Laconic'
color+0xcd.75df
image+'https://altcoinsbox.com/wp-content/uploads/2023/03/osmosis-logo.png'
base+'osmosis'
glob-http+['REPLACE_WITH_GLOB_URL' REPLACE_WITH_GLOB_HASH]
version+[0 0 2]
website+'https://osmosis.zone/'
license+'MIT'
==

View File

@ -1,12 +0,0 @@
|_ dat=@
++ grow
|%
++ mime [/image/x-icon (as-octs:mimes:html dat)]
--
++ grab
|%
++ mime |=([p=mite q=octs] q.q)
++ noun @
--
++ grad %mime
--

View File

@ -1,12 +0,0 @@
|_ dat=@
++ grow
|%
++ mime [/image/jpeg (as-octs:mimes:html dat)]
--
++ grab
|%
++ mime |=([p=mite q=octs] q.q)
++ noun @
--
++ grad %mime
--

View File

@ -1,18 +0,0 @@
::
:::: /hoon/map/mar
:: Mark for js source maps
/? 310
::
=, eyre
|_ mud=@
++ grow
|%
++ mime [/application/octet-stream (as-octs:mimes:html (@t mud))]
--
++ grab
|% :: convert from
++ mime |=([p=mite q=octs] (@t q.q))
++ noun cord :: clam from %noun
--
++ grad %mime
--

View File

@ -1,12 +0,0 @@
|_ dat=@
++ grow
|%
++ mime [/image/webp (as-octs:mimes:html dat)]
--
++ grab
|%
++ mime |=([p=mite q=octs] q.q)
++ noun @
--
++ grad %mime
--

View File

@ -13,9 +13,6 @@ fi
 yarn build
-# Copy over build and other files to app-builds for urbit deployment
-mkdir -p /app-builds/uniswap
+# Move build to app-builds so urbit can deploy it
+mkdir /app-builds/uniswap
 cp -r ./build /app-builds/uniswap/
-cp -r mar /app-builds/uniswap/
-cp desk.docket-0 /app-builds/uniswap/

View File

@ -0,0 +1,149 @@
#!/bin/bash
if [ -n "$CERC_SCRIPT_DEBUG" ]; then
set -x
fi
echo "Using IPFS endpoint ${CERC_IPFS_GLOB_HOST_ENDPOINT} for hosting globs"
echo "Using IPFS server endpoint ${CERC_IPFS_SERVER_ENDPOINT} for reading glob files"
ipfs_host_endpoint=${CERC_IPFS_GLOB_HOST_ENDPOINT}
ipfs_server_endpoint=${CERC_IPFS_SERVER_ENDPOINT}
uniswap_app_build='/app-builds/uniswap/build'
uniswap_desk_dir='/urbit/zod/uniswap'
if [ -d ${uniswap_desk_dir} ]; then
echo "Uniswap desk dir already exists, skipping deployment..."
exit 0
fi
# Fire curl requests to perform operations on the ship
dojo () {
curl -s --data '{"source":{"dojo":"'"$1"'"},"sink":{"stdout":null}}' http://localhost:12321
}
hood () {
curl -s --data '{"source":{"dojo":"+hood/'"$1"'"},"sink":{"app":"hood"}}' http://localhost:12321
}
# Create/mount a uniswap desk
hood "merge %uniswap our %landscape"
hood "mount %uniswap"
# Loop until the uniswap build appears
while [ ! -d ${uniswap_app_build} ]; do
echo "Uniswap app build not found, retrying in 5s..."
sleep 5
done
echo "Build found..."
# Copy over build to desk data dir
cp -r ${uniswap_app_build} ${uniswap_desk_dir}
# Create a mark file for .map file type
cat << EOF > "${uniswap_desk_dir}/mar/map.hoon"
::
:::: /hoon/map/mar
:: Mark for js source maps
/? 310
::
=, eyre
|_ mud=@
++ grow
|%
++ mime [/application/octet-stream (as-octs:mimes:html (@t mud))]
--
++ grab
|% :: convert from
++ mime |=([p=mite q=octs] (@t q.q))
++ noun cord :: clam from %noun
--
++ grad %mime
--
EOF
# Create a mark file for .woff file type
cat << EOF > "${uniswap_desk_dir}/mar/woff.hoon"
|_ dat=octs
++ grow
|%
++ mime [/font/woff dat]
--
++ grab
|%
++ mime |=([=mite =octs] octs)
++ noun octs
--
++ grad %mime
--
EOF
# Create a mark file for .ttf file type
cat << EOF > "${uniswap_desk_dir}/mar/ttf.hoon"
|_ dat=octs
++ grow
|%
++ mime [/font/ttf dat]
--
++ grab
|%
++ mime |=([=mite =octs] octs)
++ noun octs
--
++ grad %mime
--
EOF
rm "${uniswap_desk_dir}/desk.bill"
rm "${uniswap_desk_dir}/desk.ship"
# Commit changes and create a glob
hood "commit %uniswap"
dojo "-landscape!make-glob %uniswap /build"
glob_file=$(ls -1 -c zod/.urb/put | head -1)
echo "Created glob file: ${glob_file}"
upload_response=$(curl -X POST -F file=@./zod/.urb/put/${glob_file} ${ipfs_host_endpoint}/api/v0/add)
glob_cid=$(echo "$upload_response" | grep -o '"Hash":"[^"]*' | sed 's/"Hash":"//')
echo "Glob file uploaded to IFPS:"
echo "{ cid: ${glob_cid}, filename: ${glob_file} }"
# Curl and wait for the glob to be hosted
glob_url="${ipfs_server_endpoint}/ipfs/${glob_cid}?filename=${glob_file}"
echo "Checking if glob file hosted at ${glob_url}"
while true; do
response=$(curl -sL -w "%{http_code}" -o /dev/null "$glob_url")
if [ $response -eq 200 ]; then
echo "File found at $glob_url"
break # Exit the loop if the file is found
else
echo "File not found. Retrying in a few seconds..."
sleep 5
fi
done
glob_hash=$(echo "$glob_file" | sed "s/glob-\([a-z0-9\.]*\).glob/\1/")
# Update the docket file
cat << EOF > "${uniswap_desk_dir}/desk.docket-0"
:~ title+'Uniswap'
info+'Self-hosted uniswap frontend.'
color+0xcd.75df
image+'https://logowik.com/content/uploads/images/uniswap-uni7403.jpg'
base+'uniswap'
glob-http+['${glob_url}' ${glob_hash}]
version+[0 0 1]
website+'https://uniswap.org/'
license+'MIT'
==
EOF
# Commit changes and install the app
hood "commit %uniswap"
hood "install our %uniswap"
echo "Uniswap app installed"

View File

@ -0,0 +1,110 @@
#!/bin/bash
# $1: Glob file URL (eg. https://xyz.com/glob-0vabcd.glob)
# $2: Glob file hash (eg. 0vabcd)
# $3: Urbit ship's pier dir (default: ./zod)
if [ "$#" -lt 2 ]; then
echo "Insufficient arguments"
exit 0
fi
glob_url=$1
glob_hash=$2
echo "Using glob file from ${glob_url} with hash ${glob_hash}"
# Default pier dir: ./zod
# Default desk dir: ./zod/uniswap
pier_dir="${3:-./zod}"
uniswap_desk_dir="${pier_dir}/uniswap"
echo "Using ${uniswap_desk_dir} as the Uniswap desk dir path"
# Fire curl requests to perform operations on the ship
dojo () {
curl -s --data '{"source":{"dojo":"'"$1"'"},"sink":{"stdout":null}}' http://localhost:12321
}
hood () {
curl -s --data '{"source":{"dojo":"+hood/'"$1"'"},"sink":{"app":"hood"}}' http://localhost:12321
}
# Create/mount a uniswap desk
hood "merge %uniswap our %landscape"
hood "mount %uniswap"
# Create a mark file for .map file type
cat << EOF > "${uniswap_desk_dir}/mar/map.hoon"
::
:::: /hoon/map/mar
:: Mark for js source maps
/? 310
::
=, eyre
|_ mud=@
++ grow
|%
++ mime [/application/octet-stream (as-octs:mimes:html (@t mud))]
--
++ grab
|% :: convert from
++ mime |=([p=mite q=octs] (@t q.q))
++ noun cord :: clam from %noun
--
++ grad %mime
--
EOF
# Create a mark file for .woff file type
cat << EOF > "${uniswap_desk_dir}/mar/woff.hoon"
|_ dat=octs
++ grow
|%
++ mime [/font/woff dat]
--
++ grab
|%
++ mime |=([=mite =octs] octs)
++ noun octs
--
++ grad %mime
--
EOF
# Create a mark file for .ttf file type
cat << EOF > "${uniswap_desk_dir}/mar/ttf.hoon"
|_ dat=octs
++ grow
|%
++ mime [/font/ttf dat]
--
++ grab
|%
++ mime |=([=mite =octs] octs)
++ noun octs
--
++ grad %mime
--
EOF
rm "${uniswap_desk_dir}/desk.bill"
rm "${uniswap_desk_dir}/desk.ship"
# Update the docket file
cat << EOF > "${uniswap_desk_dir}/desk.docket-0"
:~ title+'Uniswap'
info+'Self-hosted uniswap frontend.'
color+0xcd.75df
image+'https://logowik.com/content/uploads/images/uniswap-uni7403.jpg'
base+'uniswap'
glob-http+['${glob_url}' ${glob_hash}]
version+[0 0 1]
website+'https://uniswap.org/'
license+'MIT'
==
EOF
# Commit changes and install the app
hood "commit %uniswap"
hood "install our %uniswap"
echo "Uniswap app installed"

View File

@ -0,0 +1,21 @@
#!/bin/bash
# $1: Remote user host
# $2: Remote Urbit ship's pier dir path (eg. /home/user/zod)
# $3: Glob file URL (eg. https://xyz.com/glob-0vabcd.glob)
# $4: Glob file hash (eg. 0vabcd)
if [ "$#" -ne 4 ]; then
echo "Incorrect number of arguments"
echo "Usage: $0 <username@remote_host> </path/to/remote/pier/folder> <glob_url> <glob_hash>"
exit 1
fi
remote_user_host="$1"
remote_pier_folder="$2"
glob_url="$3"
glob_hash="$4"
installation_script="./install-uniswap-app.sh"
ssh "$remote_user_host" "bash -s $glob_url $glob_hash $remote_pier_folder" < "$installation_script"

View File

@ -1,10 +0,0 @@
:~ title+'Uniswap'
info+'Self-hosted uniswap frontend.'
color+0xcd.75df
image+'https://logowik.com/content/uploads/images/uniswap-uni7403.jpg'
base+'uniswap'
glob-http+['REPLACE_WITH_GLOB_URL' REPLACE_WITH_GLOB_HASH]
version+[0 0 1]
website+'https://uniswap.org/'
license+'MIT'
==

View File

@ -1,18 +0,0 @@
::
:::: /hoon/map/mar
:: Mark for js source maps
/? 310
::
=, eyre
|_ mud=@
++ grow
|%
++ mime [/application/octet-stream (as-octs:mimes:html (@t mud))]
--
++ grab
|% :: convert from
++ mime |=([p=mite q=octs] (@t q.q))
++ noun cord :: clam from %noun
--
++ grad %mime
--

View File

@ -1,12 +0,0 @@
|_ dat=octs
++ grow
|%
++ mime [/font/ttf dat]
--
++ grab
|%
++ mime |=([=mite =octs] octs)
++ noun octs
--
++ grad %mime
--

View File

@ -1,12 +0,0 @@
|_ dat=octs
++ grow
|%
++ mime [/font/woff dat]
--
++ grab
|%
++ mime |=([=mite =octs] octs)
++ noun octs
--
++ grad %mime
--

View File

@ -1,34 +0,0 @@
#!/bin/bash
# $1: Remote user host
# $2: App name (eg. uniswap)
# $3: Assets dir path (local) for app (eg. /home/user/myapp/urbit-files)
# $4: Remote Urbit ship's pier dir path (eg. /home/user/zod)
# $5: Glob file URL (eg. https://xyz.com/glob-0vabcd.glob)
# $6: Glob file hash (eg. 0vabcd)
if [ "$#" -ne 6 ]; then
echo "Incorrect number of arguments"
echo "Usage: $0 <username@remote_host> <app_name> </path/to/app/assets/folder> </path/to/remote/pier/folder> <glob_url> <glob_hash>"
exit 1
fi
remote_user_host="$1"
app_name=$2
app_assets_folder=$3
remote_pier_folder="$4"
glob_url="$5"
glob_hash="$6"
installation_script="./install-urbit-app.sh"
# Copy over the assets to remote machine in a tmp dir
remote_app_assets_folder=/tmp/urbit-app-assets/$app_name
ssh "$remote_user_host" "mkdir -p $remote_app_assets_folder"
scp -r $app_assets_folder/* $remote_user_host:$remote_app_assets_folder
# Run the installation script
ssh "$remote_user_host" "bash -s $app_name $remote_app_assets_folder '${glob_url}' $glob_hash $remote_pier_folder" < "$installation_script"
# Remove the tmp assets dir
ssh "$remote_user_host" "rm -rf $remote_app_assets_folder"

View File

@ -1,110 +0,0 @@
#!/bin/bash
if [ -n "$CERC_SCRIPT_DEBUG" ]; then
set -x
fi
if [ -z "$CERC_URBIT_APP" ]; then
echo "CERC_URBIT_APP not set, exiting"
exit 0
fi
echo "Creating Urbit application for ${CERC_URBIT_APP}"
app_desk_dir=/urbit/zod/${CERC_URBIT_APP}
if [ -d ${app_desk_dir} ]; then
echo "Desk dir already exists for ${CERC_URBIT_APP}, skipping deployment..."
exit 0
fi
app_build=/app-builds/${CERC_URBIT_APP}/build
app_mark_files=/app-builds/${CERC_URBIT_APP}/mar
app_docket_file=/app-builds/${CERC_URBIT_APP}/desk.docket-0
echo "Reading app build from ${app_build}"
echo "Reading additional mark files from ${app_mark_files}"
echo "Reading docket file ${app_docket_file}"
# Loop until the app's build appears
while [ ! -d ${app_build} ]; do
echo "${CERC_URBIT_APP} app build not found, retrying in 5s..."
sleep 5
done
echo "Build found..."
echo "Using IPFS endpoint ${CERC_IPFS_GLOB_HOST_ENDPOINT} for hosting the ${CERC_URBIT_APP} glob"
echo "Using IPFS server endpoint ${CERC_IPFS_SERVER_ENDPOINT} for reading ${CERC_URBIT_APP} glob"
ipfs_host_endpoint=${CERC_IPFS_GLOB_HOST_ENDPOINT}
ipfs_server_endpoint=${CERC_IPFS_SERVER_ENDPOINT}
# Fire curl requests to perform operations on the ship
dojo () {
curl -s --data '{"source":{"dojo":"'"$1"'"},"sink":{"stdout":null}}' http://localhost:12321
}
hood () {
curl -s --data '{"source":{"dojo":"+hood/'"$1"'"},"sink":{"app":"hood"}}' http://localhost:12321
}
# Create / mount the app's desk
hood "merge %${CERC_URBIT_APP} our %landscape"
hood "mount %${CERC_URBIT_APP}"
# Copy over build to desk data dir
cp -r ${app_build} ${app_desk_dir}
# Copy over the additional mark files
cp ${app_mark_files}/* ${app_desk_dir}/mar/
rm "${app_desk_dir}/desk.bill"
rm "${app_desk_dir}/desk.ship"
# Commit changes and create a glob
hood "commit %${CERC_URBIT_APP}"
dojo "-landscape!make-glob %${CERC_URBIT_APP} /build"
glob_file=$(ls -1 -c zod/.urb/put | head -1)
echo "Created glob file: ${glob_file}"
# Upload the glob file to IPFS
echo "Uploading glob file to ${ipfs_host_endpoint}"
upload_response=$(curl -X POST -F file=@./zod/.urb/put/${glob_file} ${ipfs_host_endpoint}/api/v0/add)
glob_cid=$(echo "$upload_response" | grep -o '"Hash":"[^"]*' | sed 's/"Hash":"//')
glob_url="${ipfs_server_endpoint}/ipfs/${glob_cid}?filename=${glob_file}"
glob_hash=$(echo "$glob_file" | sed "s/glob-\([a-z0-9\.]*\).glob/\1/")
echo "Glob file uploaded to IFPS:"
echo "{ cid: ${glob_cid}, filename: ${glob_file} }"
echo "{ url: ${glob_url}, hash: ${glob_hash} }"
# Exit if the installation is not required
if [ "$CERC_ENABLE_APP_INSTALL" = "false" ]; then
echo "CERC_ENABLE_APP_INSTALL set to false, skipping app installation"
exit 0
fi
# Curl and wait for the glob to be hosted
echo "Checking if glob file hosted at ${glob_url}"
while true; do
response=$(curl -sL -w "%{http_code}" -o /dev/null "$glob_url")
if [ $response -eq 200 ]; then
echo "File found at $glob_url"
break # Exit the loop if the file is found
else
echo "File not found, retrying in a 5s..."
sleep 5
fi
done
# Replace the docket file for app
# Substitute the glob URL and hash
cp ${app_docket_file} ${app_desk_dir}/
sed -i "s|REPLACE_WITH_GLOB_URL|${glob_url}|g; s|REPLACE_WITH_GLOB_HASH|${glob_hash}|g" ${app_desk_dir}/desk.docket-0
# Commit changes and install the app
hood "commit %${CERC_URBIT_APP}"
hood "install our %${CERC_URBIT_APP}"
echo "${CERC_URBIT_APP} app installed"

View File

@ -1,60 +0,0 @@
#!/bin/bash
# $1: App name (eg. uniswap)
# $2: Assets dir path (local) for app (eg. /home/user/myapp/urbit-files)
# $3: Glob file URL (eg. https://xyz.com/glob-0vabcd.glob)
# $4: Glob file hash (eg. 0vabcd)
# $5: Urbit ship's pier dir (default: ./zod)
if [ "$#" -lt 4 ]; then
echo "Insufficient arguments"
echo "Usage: $0 <app_name> </path/to/app/assets/folder> <glob_url> <glob_hash> [/path/to/remote/pier/folder]"
exit 1
fi
app_name=$1
app_mark_files=$2/mar
app_docket_file=$2/desk.docket-0
echo "Creating Urbit application for ${app_name}"
echo "Reading additional mark files from ${app_mark_files}"
echo "Reading docket file ${app_docket_file}"
glob_url=$3
glob_hash=$4
echo "Using glob file from ${glob_url} with hash ${glob_hash}"
# Default pier dir: ./zod
# Default desk dir: ./zod/<app_name>
pier_dir="${5:-./zod}"
app_desk_dir="${pier_dir}/${app_name}"
echo "Using ${app_desk_dir} as the ${app_name} desk dir path"
# Fire curl requests to perform operations on the ship
dojo () {
curl -s --data '{"source":{"dojo":"'"$1"'"},"sink":{"stdout":null}}' http://localhost:12321
}
hood () {
curl -s --data '{"source":{"dojo":"+hood/'"$1"'"},"sink":{"app":"hood"}}' http://localhost:12321
}
# Create / mount the app's desk
hood "merge %${app_name} our %landscape"
hood "mount %${app_name}"
# Copy over the additional mark files
cp ${app_mark_files}/* ${app_desk_dir}/mar/
rm "${app_desk_dir}/desk.bill"
rm "${app_desk_dir}/desk.ship"
# Replace the docket file for app
# Substitute the glob URL and hash
cp ${app_docket_file} ${app_desk_dir}/
sed -i "s|REPLACE_WITH_GLOB_URL|${glob_url}|g; s|REPLACE_WITH_GLOB_HASH|${glob_hash}|g" ${app_desk_dir}/desk.docket-0
# Commit changes and install the app
hood "commit %${app_name}"
hood "install our %${app_name}"
echo "${app_name} app installed"

View File

@ -7,13 +7,11 @@ fi
 pier_dir="/urbit/zod"
-# TODO: Bootstrap fake ship on the first run
 # Run urbit ship in daemon mode
 # Check if the directory exists
 if [ -d "$pier_dir" ]; then
   echo "Pier directory already exists, rebooting..."
-  /urbit/zod/.run -d
+  urbit -d zod
 else
   echo "Creating a new fake ship..."
   urbit -d -F zod

View File

@ -41,7 +41,7 @@
 timeTravelMaxAge = 86400 # 1 day
 [metrics]
-host = "0.0.0.0"
+host = "127.0.0.1"
 port = 9000
 [metrics.gql]
 port = 9001
@ -84,6 +84,8 @@
 subgraphEventsOrder = true
 # Filecoin block time: https://docs.filecoin.io/basics/the-blockchain/blocks-and-tipsets#blocktime
 blockDelayInMilliSecs = 30000
+prefetchBlocksInMem = false
+prefetchBlockCount = 10
 # Boolean to switch between modes of processing events when starting the server.
 # Setting to true will fetch filtered events and required blocks in a range of blocks and then process them.

View File

@ -41,7 +41,7 @@
 timeTravelMaxAge = 86400 # 1 day
 [metrics]
-host = "0.0.0.0"
+host = "127.0.0.1"
 port = 9000
 [metrics.gql]
 port = 9001
@ -84,6 +84,8 @@
 subgraphEventsOrder = true
 # Filecoin block time: https://docs.filecoin.io/basics/the-blockchain/blocks-and-tipsets#blocktime
 blockDelayInMilliSecs = 30000
+prefetchBlocksInMem = false
+prefetchBlockCount = 10
 # Boolean to switch between modes of processing events when starting the server.
 # Setting to true will fetch filtered events and required blocks in a range of blocks and then process them.

View File

@ -2,6 +2,4 @@
 # Build a local version of the task executor for act-runner
 source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
 SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
-cd ${CERC_REPO_BASE_DIR}/hosting/act-runner
-docker build -t cerc/act-runner-task-executor:local -f Dockerfile.task-executor ${build_command_args} .
+docker build -t cerc/act-runner-task-executor:local -f ${CERC_REPO_BASE_DIR}/hosting/act-runner/Dockerfile.task-executor ${build_command_args} ${SCRIPT_DIR}

View File

@ -10,10 +10,9 @@ COPY genesis /opt/genesis
 COPY --from=ethgen /usr/local/bin/eth2-testnet-genesis /usr/local/bin/
 COPY --from=ethgen /usr/local/bin/eth2-val-tools /usr/local/bin/
 COPY --from=ethgen /apps /apps
-RUN cd /apps/el-gen && pip3 install --break-system-packages -r requirements.txt
+RUN cd /apps/el-gen && pip3 install -r requirements.txt
 # web3==5.24.0 used by el-gen is broken on python 3.11
-RUN pip3 install --break-system-packages --upgrade "web3==6.5.0"
-RUN pip3 install --break-system-packages --upgrade "typing-extensions"
+RUN pip3 install --upgrade "web3==6.5.0"
 # Build genesis config
 RUN apk add --no-cache make bash envsubst jq

View File

@ -1,14 +0,0 @@
record:
type: ApplicationDeploymentRecord
version: 1.2.3
name: name
description: description
application: application
url: url
dns: dns
request: request
meta:
foo: bar
tags:
- a
- b

View File

@ -1,12 +0,0 @@
record:
type: GeneralRecord
version: 1.2.3
name: name
description: description
category: category
value: value
meta:
foo: bar
tags:
- a
- b

View File

@ -1,17 +0,0 @@
record:
type: ApplicationArtifact
version: 1.2.3
name: name
description: description
application: appidgoeshere
content_type: content_type
os: os
cpu: cpu
uri:
- uri://a
- uri://b
meta:
foo: bar
tags:
- a
- b

View File

@ -1,11 +0,0 @@
record:
type: DnsRecord
version: 0.0.1
name: "foo"
resource_type: "A"
value: "bar"
meta:
foo: bar
tags:
- a
- b

View File

@ -1,18 +0,0 @@
record:
type: ApplicationRecord
version: 0.0.1
name: my-demo-app
description: "Description of my app"
homepage: http://my.demo.app
license: license
author: author
repository:
- "https://my.demo.repo"
repository_ref: "v0.1.0"
app_version: "0.1.0"
app_type: "webapp"
meta:
foo: bar
tags:
- a
- b

View File

@ -1,17 +0,0 @@
record:
type: ApplicationDeploymentRequest
version: 1.2.3
application: application
dns: dns
config:
env:
ENV_VAR_A: A
ENV_VAR_B: B
crn:
- crn://foo.bar
- crn://bar.baz
meta:
foo: bar
tags:
- a
- b

View File

@ -1,4 +0,0 @@
#!/usr/bin/env bash
# Build the mars-v2 image
source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
docker build -t cerc/mars-v2:local -f ${CERC_REPO_BASE_DIR}/mars-v2-frontend/Dockerfile ${build_command_args} ${CERC_REPO_BASE_DIR}/mars-v2-frontend

View File

@ -1,4 +0,0 @@
#!/usr/bin/env bash
# Build the mars image
source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
docker build -t cerc/mars:local -f ${CERC_REPO_BASE_DIR}/mars-interface/Dockerfile ${build_command_args} ${CERC_REPO_BASE_DIR}/mars-interface

View File

@ -1,6 +1,6 @@
 # Originally from: https://github.com/devcontainers/images/blob/main/src/javascript-node/.devcontainer/Dockerfile
 # [Choice] Node.js version (use -bullseye variants on local arm64/Apple Silicon): 18, 16, 14, 18-bullseye, 16-bullseye, 14-bullseye, 18-buster, 16-buster, 14-buster
-ARG VARIANT=20-bullseye-slim
+ARG VARIANT=20-bullseye
 FROM node:${VARIANT}
 ARG USERNAME=node
@ -30,13 +30,13 @@ RUN \
 # [Optional] Uncomment this section to install additional OS packages.
 RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
-    && apt-get -y install --no-install-recommends jq gettext-base procps
+    && apt-get -y install --no-install-recommends jq gettext-base moreutils
 # [Optional] Uncomment if you want to install more global node modules
 # RUN su node -c "npm install -g <your-package-list-here>"
 # Expose port for http
-EXPOSE 80
+EXPOSE 3000
 COPY /scripts /scripts

View File

@ -33,8 +33,8 @@ if [ -f ".env" ]; then
rm -f $TMP_ENV rm -f $TMP_ENV
fi fi
for f in $(find . -type f \( -regex '.*.html?' -or -regex ".*.[tj]s\(x\|on\)?$" \) | grep -v 'node_modules' | grep -v '.git'); do for f in $(find "$TRG_DIR" -regex ".*.[tj]sx?$" -type f | grep -v 'node_modules'); do
for e in $(cat "${f}" | tr -s '[:blank:]' '\n' | tr -s '["/\\{},();]' '\n' | tr -s "[']" '\n' | egrep -o -e '^CERC_RUNTIME_ENV_.+$' -e '^LACONIC_HOSTED_CONFIG_.+$'); do for e in $(cat "${f}" | tr -s '[:blank:]' '\n' | tr -s '[{},();]' '\n' | egrep -o '^"CERC_RUNTIME_ENV_[^\"]+"'); do
orig_name=$(echo -n "${e}" | sed 's/"//g') orig_name=$(echo -n "${e}" | sed 's/"//g')
cur_name=$(echo -n "${orig_name}" | sed 's/CERC_RUNTIME_ENV_//g') cur_name=$(echo -n "${orig_name}" | sed 's/CERC_RUNTIME_ENV_//g')
cur_val=$(echo -n "\$${cur_name}" | envsubst) cur_val=$(echo -n "\$${cur_name}" | envsubst)

View File

@ -21,11 +21,6 @@ WORK_DIR="${1:-/app}"
cd "${WORK_DIR}" || exit 1 cd "${WORK_DIR}" || exit 1
# If this file doesn't exist at all, we'll get errors below.
if [ ! -f "next.config.js" ]; then
touch next.config.js
fi
if [ ! -f "next.config.dist" ]; then if [ ! -f "next.config.dist" ]; then
cp next.config.js next.config.dist cp next.config.js next.config.dist
fi fi
@ -109,8 +104,7 @@ CUR_NEXT_VERSION="`jq -r '.dependencies.next' package.json`"
if [ "$CERC_NEXT_VERSION" != "keep" ] && [ "$CUR_NEXT_VERSION" != "$CERC_NEXT_VERSION" ]; then if [ "$CERC_NEXT_VERSION" != "keep" ] && [ "$CUR_NEXT_VERSION" != "$CERC_NEXT_VERSION" ]; then
echo "Changing 'next' version specifier from '$CUR_NEXT_VERSION' to '$CERC_NEXT_VERSION' (set with '--extra-build-args \"--build-arg CERC_NEXT_VERSION=$CERC_NEXT_VERSION\"')" echo "Changing 'next' version specifier from '$CUR_NEXT_VERSION' to '$CERC_NEXT_VERSION' (set with '--extra-build-args \"--build-arg CERC_NEXT_VERSION=$CERC_NEXT_VERSION\"')"
cat package.json | jq ".dependencies.next = \"$CERC_NEXT_VERSION\"" > package.json.$$ cat package.json | jq ".dependencies.next = \"$CERC_NEXT_VERSION\"" | sponge package.json
mv package.json.$$ package.json
fi fi
$CERC_BUILD_TOOL install || exit 1 $CERC_BUILD_TOOL install || exit 1
@ -134,8 +128,7 @@ to use for the build with:
############################################################################### ###############################################################################
EOF EOF
cat package.json | jq ".dependencies.next = \"^$CERC_MIN_NEXTVER\"" > package.json.$$ cat package.json | jq ".dependencies.next = \"^$CERC_MIN_NEXTVER\"" | sponge package.json
mv package.json.$$ package.json
$CERC_BUILD_TOOL install || exit 1 $CERC_BUILD_TOOL install || exit 1
fi fi

View File

@ -42,7 +42,7 @@ if [ "$CERC_NEXTJS_SKIP_GENERATE" != "true" ]; then
while [ $count -lt $CERC_MAX_GENERATE_TIME ] && [ "$generate_done" == "false" ]; do while [ $count -lt $CERC_MAX_GENERATE_TIME ] && [ "$generate_done" == "false" ]; do
sleep 1 sleep 1
count=$((count + 1)) count=$((count + 1))
grep 'rendered as static' gen.out > /dev/null grep 'rendered as static HTML' gen.out > /dev/null
if [ $? -eq 0 ]; then if [ $? -eq 0 ]; then
generate_done="true" generate_done="true"
fi fi
@ -58,4 +58,4 @@ if [ "$CERC_NEXTJS_SKIP_GENERATE" != "true" ]; then
fi fi
fi fi
$CERC_BUILD_TOOL start . -- -p ${CERC_LISTEN_PORT:-80} $CERC_BUILD_TOOL start . -p ${CERC_LISTEN_PORT:-3000}

View File

@ -1,4 +0,0 @@
#!/usr/bin/env bash
# Build the osmosis front end image
source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
docker build -t cerc/osmosis-front-end-urbit:local -f ${CERC_REPO_BASE_DIR}/osmosis-frontend/docker/Dockerfile.static ${build_command_args} ${CERC_REPO_BASE_DIR}/osmosis-frontend

View File

@ -1,4 +0,0 @@
#!/usr/bin/env bash
# Build the osmosis front end image
source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
docker build -t cerc/osmosis-front-end:local -f ${CERC_REPO_BASE_DIR}/osmosis-frontend/docker/Dockerfile.static ${build_command_args} ${CERC_REPO_BASE_DIR}/osmosis-frontend

View File

@ -1,5 +0,0 @@
#!/usr/bin/env bash
# Build the ping pub image
source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
docker build -t cerc/ping-pub:local ${build_command_args} -f $CERC_REPO_BASE_DIR/explorer/Dockerfile $CERC_REPO_BASE_DIR/explorer

Some files were not shown because too many files have changed in this diff.