diff --git a/app/data/compose/docker-compose-fixturenet-optimism.yml b/app/data/compose/docker-compose-fixturenet-optimism.yml index ddf7e290..c5804a14 100644 --- a/app/data/compose/docker-compose-fixturenet-optimism.yml +++ b/app/data/compose/docker-compose-fixturenet-optimism.yml @@ -70,7 +70,6 @@ services: command: "/run-op-geth.sh" ports: - "0.0.0.0:8545:8545" - - "0.0.0.0:8546:8546" healthcheck: test: ["CMD", "nc", "-vz", "localhost:8545"] interval: 30s diff --git a/app/data/container-image-list.txt b/app/data/container-image-list.txt index 256f0a6f..f6f2b612 100644 --- a/app/data/container-image-list.txt +++ b/app/data/container-image-list.txt @@ -14,7 +14,6 @@ cerc/laconic-registry-cli cerc/laconic-console-host cerc/fixturenet-eth-geth cerc/fixturenet-eth-lighthouse -cerc/fixturenet-eth-genesis cerc/watcher-ts cerc/watcher-mobymask cerc/watcher-erc20 diff --git a/app/data/stacks/mobymask-v3/stack.yml b/app/data/stacks/mobymask-v3/stack.yml index fce0f312..b07b3680 100644 --- a/app/data/stacks/mobymask-v3/stack.yml +++ b/app/data/stacks/mobymask-v3/stack.yml @@ -2,11 +2,11 @@ version: "1.0" description: "MobyMask v3 stack" name: mobymask-v3 repos: - - github.com/cerc-io/ts-nitro@v0.1.13 - - github.com/cerc-io/watcher-ts@v0.2.63 - - github.com/cerc-io/mobymask-v2-watcher-ts@v0.2.2 + - github.com/cerc-io/ts-nitro@v0.1.12 + - github.com/cerc-io/watcher-ts@v0.2.57 + - github.com/cerc-io/mobymask-v2-watcher-ts@v3 # TODO: Update after fixes - github.com/cerc-io/MobyMask@v0.1.3 - - github.com/cerc-io/mobymask-ui@v0.2.1 + - github.com/cerc-io/mobymask-ui@v0.2.0 containers: - cerc/nitro-contracts - cerc/watcher-ts diff --git a/app/data/stacks/mobymask-v3/watcher.md b/app/data/stacks/mobymask-v3/watcher.md index c21fbab3..1aa63c1d 100644 --- a/app/data/stacks/mobymask-v3/watcher.md +++ b/app/data/stacks/mobymask-v3/watcher.md @@ -23,7 +23,7 @@ laconic-so --stack mobymask-v3 build-containers --exclude cerc/mobymask-ui Create and update an env file to be used in the next 
step ([defaults](../../config/watcher-mobymask-v3/mobymask-params.env)): ```bash - # External ETH RPC endpoint for contract(s) deployment + # External ETH RPC endpoint (L2 Optimism geth) CERC_ETH_RPC_ENDPOINT= # External ETH RPC endpoint used for queries in the watcher @@ -32,9 +32,6 @@ Create and update an env file to be used in the next step ([defaults](../../conf # External ETH RPC endpoint used for mutations in the watcher CERC_ETH_RPC_MUTATION_ENDPOINT= - # External ETH endpoint used by watcher's Nitro node - CERC_NITRO_CHAIN_URL= - # Specify the an account PK for contract deployment CERC_PRIVATE_KEY_DEPLOYER= diff --git a/app/data/stacks/package-registry/stack.yml b/app/data/stacks/package-registry/stack.yml index 9d75925f..33c6c939 100644 --- a/app/data/stacks/package-registry/stack.yml +++ b/app/data/stacks/package-registry/stack.yml @@ -2,7 +2,7 @@ version: "1.1" name: package-registry description: "Local Package Registry" repos: - - github.com/cerc-io/hosting + - git.vdb.to/cerc-io/hosting - gitea.com/gitea/act_runner containers: - cerc/act-runner diff --git a/app/deploy.py b/app/deploy.py index 51749ff9..9298148a 100644 --- a/app/deploy.py +++ b/app/deploy.py @@ -20,13 +20,12 @@ import copy import os import sys from dataclasses import dataclass -from decouple import config from importlib import resources import subprocess from python_on_whales import DockerClient, DockerException import click from pathlib import Path -from app.util import include_exclude_check, get_parsed_stack_config, global_options2 +from app.util import include_exclude_check, get_parsed_stack_config, global_options2, get_dev_root_path from app.deploy_types import ClusterContext, DeployCommandContext from app.deployment_create import create as deployment_create from app.deployment_create import init as deployment_init @@ -235,17 +234,15 @@ def _make_runtime_env(ctx): # stack has to be either PathLike pointing to a stack yml file, or a string with the name of a known stack def 
_make_cluster_context(ctx, stack, include, exclude, cluster, env_file): - if ctx.local_stack: - dev_root_path = os.getcwd()[0:os.getcwd().rindex("stack-orchestrator")] - print(f'Local stack dev_root_path (CERC_REPO_BASE_DIR) overridden to: {dev_root_path}') - else: - dev_root_path = os.path.expanduser(config("CERC_REPO_BASE_DIR", default="~/cerc")) + dev_root_path = get_dev_root_path(ctx) # TODO: huge hack, fix this # If the caller passed a path for the stack file, then we know that we can get the compose files # from the same directory + deployment = False if isinstance(stack, os.PathLike): compose_dir = stack.parent.joinpath("compose") + deployment = True else: # See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure compose_dir = Path(__file__).absolute().parent.joinpath("data", "compose") @@ -296,14 +293,24 @@ def _make_cluster_context(ctx, stack, include, exclude, cluster, env_file): if pod_repository is None or pod_repository == "internal": compose_file_name = os.path.join(compose_dir, f"docker-compose-{pod_path}.yml") else: - pod_root_dir = os.path.join(dev_root_path, pod_repository.split("/")[-1], pod["path"]) - compose_file_name = os.path.join(pod_root_dir, "docker-compose.yml") - pod_pre_start_command = pod["pre_start_command"] - pod_post_start_command = pod["post_start_command"] - if pod_pre_start_command is not None: - pre_start_commands.append(os.path.join(pod_root_dir, pod_pre_start_command)) - if pod_post_start_command is not None: - post_start_commands.append(os.path.join(pod_root_dir, pod_post_start_command)) + if deployment: + compose_file_name = os.path.join(compose_dir, "docker-compose.yml") + pod_pre_start_command = pod["pre_start_command"] + pod_post_start_command = pod["post_start_command"] + script_dir = compose_dir.parent.joinpath("pods", pod_name, "scripts") + if pod_pre_start_command is not None: + pre_start_commands.append(os.path.join(script_dir, pod_pre_start_command)) + if pod_post_start_command is 
not None: + post_start_commands.append(os.path.join(script_dir, pod_post_start_command)) + else: + pod_root_dir = os.path.join(dev_root_path, pod_repository.split("/")[-1], pod["path"]) + compose_file_name = os.path.join(pod_root_dir, "docker-compose.yml") + pod_pre_start_command = pod["pre_start_command"] + pod_post_start_command = pod["post_start_command"] + if pod_pre_start_command is not None: + pre_start_commands.append(os.path.join(pod_root_dir, pod_pre_start_command)) + if pod_post_start_command is not None: + post_start_commands.append(os.path.join(pod_root_dir, pod_post_start_command)) compose_files.append(compose_file_name) else: if ctx.verbose: diff --git a/app/deploy_util.py b/app/deploy_util.py index 2f5f0188..498e3dfd 100644 --- a/app/deploy_util.py +++ b/app/deploy_util.py @@ -16,14 +16,14 @@ import os from typing import List from app.deploy_types import DeployCommandContext, VolumeMapping -from app.util import get_parsed_stack_config, get_yaml, get_compose_file_dir +from app.util import get_parsed_stack_config, get_yaml, get_compose_file_dir, get_pod_list def _container_image_from_service(stack: str, service: str): # Parse the compose files looking for the image name of the specified service image_name = None parsed_stack = get_parsed_stack_config(stack) - pods = parsed_stack["pods"] + pods = get_pod_list(parsed_stack) yaml = get_yaml() for pod in pods: pod_file_path = os.path.join(get_compose_file_dir(), f"docker-compose-{pod}.yml") diff --git a/app/deployment_create.py b/app/deployment_create.py index 76016262..c6128db6 100644 --- a/app/deployment_create.py +++ b/app/deployment_create.py @@ -17,12 +17,13 @@ import click from importlib import util import os from pathlib import Path +from typing import List import random -from shutil import copyfile, copytree +from shutil import copy, copyfile, copytree import sys -from app.util import get_stack_file_path, get_parsed_deployment_spec, get_parsed_stack_config, global_options, get_yaml -from app.util 
import get_compose_file_dir -from app.deploy_types import DeploymentContext, LaconicStackSetupCommand +from app.util import (get_stack_file_path, get_parsed_deployment_spec, get_parsed_stack_config, global_options, get_yaml, + get_pod_list, get_pod_file_path, pod_has_scripts, get_pod_script_paths, get_plugin_code_path) +from app.deploy_types import DeploymentContext, DeployCommandContext, LaconicStackSetupCommand def _make_default_deployment_dir(): @@ -32,10 +33,10 @@ def _make_default_deployment_dir(): def _get_ports(stack): ports = {} parsed_stack = get_parsed_stack_config(stack) - pods = parsed_stack["pods"] + pods = get_pod_list(parsed_stack) yaml = get_yaml() for pod in pods: - pod_file_path = os.path.join(get_compose_file_dir(), f"docker-compose-{pod}.yml") + pod_file_path = get_pod_file_path(parsed_stack, pod) parsed_pod_file = yaml.load(open(pod_file_path, "r")) if "services" in parsed_pod_file: for svc_name, svc in parsed_pod_file["services"].items(): @@ -49,10 +50,10 @@ def _get_named_volumes(stack): # Parse the compose files looking for named volumes named_volumes = [] parsed_stack = get_parsed_stack_config(stack) - pods = parsed_stack["pods"] + pods = get_pod_list(parsed_stack) yaml = get_yaml() for pod in pods: - pod_file_path = os.path.join(get_compose_file_dir(), f"docker-compose-{pod}.yml") + pod_file_path = get_pod_file_path(parsed_stack, pod) parsed_pod_file = yaml.load(open(pod_file_path, "r")) if "volumes" in parsed_pod_file: volumes = parsed_pod_file["volumes"] @@ -105,11 +106,16 @@ def _fixup_pod_file(pod, spec, compose_dir): pod["services"][container_name]["ports"] = container_ports +def _commands_plugin_path(ctx: DeployCommandContext): + plugin_path = get_plugin_code_path(ctx.stack) + return plugin_path.joinpath("deploy", "commands.py") + + def call_stack_deploy_init(deploy_command_context): # Link with the python file in the stack # Call a function in it # If no function found, return None - python_file_path = 
get_stack_file_path(deploy_command_context.stack).parent.joinpath("deploy", "commands.py") + python_file_path = _commands_plugin_path(deploy_command_context) if python_file_path.exists(): spec = util.spec_from_file_location("commands", python_file_path) imported_stack = util.module_from_spec(spec) @@ -124,7 +130,8 @@ def call_stack_deploy_setup(deploy_command_context, parameters: LaconicStackSetu # Link with the python file in the stack # Call a function in it # If no function found, return None - python_file_path = get_stack_file_path(deploy_command_context.stack).parent.joinpath("deploy", "commands.py") + python_file_path = _commands_plugin_path(deploy_command_context) + print(f"Path: {python_file_path}") if python_file_path.exists(): spec = util.spec_from_file_location("commands", python_file_path) imported_stack = util.module_from_spec(spec) @@ -139,7 +146,7 @@ def call_stack_deploy_create(deployment_context, extra_args): # Link with the python file in the stack # Call a function in it # If no function found, return None - python_file_path = get_stack_file_path(deployment_context.command_context.stack).parent.joinpath("deploy", "commands.py") + python_file_path = _commands_plugin_path(deployment_context.command_context) if python_file_path.exists(): spec = util.spec_from_file_location("commands", python_file_path) imported_stack = util.module_from_spec(spec) @@ -263,14 +270,21 @@ def init(ctx, config, output, map_ports_to_host): def _write_config_file(spec_file: Path, config_env_file: Path): spec_content = get_parsed_deployment_spec(spec_file) - if spec_content["config"]: - config_vars = spec_content["config"] - if config_vars: - with open(config_env_file, "w") as output_file: + # Note: we want to write an empty file even if we have no config variables + with open(config_env_file, "w") as output_file: + if "config" in spec_content and spec_content["config"]: + config_vars = spec_content["config"] + if config_vars: for variable_name, variable_value in 
config_vars.items(): output_file.write(f"{variable_name}={variable_value}\n") +def _copy_files_to_directory(file_paths: List[Path], directory: Path): + for path in file_paths: + # Using copy to preserve the execute bit + copy(path, os.path.join(directory, os.path.basename(path))) + + @click.command() @click.option("--spec-file", required=True, help="Spec file to use to create this deployment") @click.option("--deployment-dir", help="Create deployment files in this directory") @@ -298,15 +312,19 @@ def create(ctx, spec_file, deployment_dir, network_dir, initial_peers): # Copy any config varibles from the spec file into an env file suitable for compose _write_config_file(spec_file, os.path.join(deployment_dir, "config.env")) # Copy the pod files into the deployment dir, fixing up content - pods = parsed_stack['pods'] + pods = get_pod_list(parsed_stack) destination_compose_dir = os.path.join(deployment_dir, "compose") os.mkdir(destination_compose_dir) + destination_pods_dir = os.path.join(deployment_dir, "pods") + os.mkdir(destination_pods_dir) data_dir = Path(__file__).absolute().parent.joinpath("data") yaml = get_yaml() for pod in pods: - pod_file_path = os.path.join(get_compose_file_dir(), f"docker-compose-{pod}.yml") + pod_file_path = get_pod_file_path(parsed_stack, pod) parsed_pod_file = yaml.load(open(pod_file_path, "r")) extra_config_dirs = _find_extra_config_dirs(parsed_pod_file, pod) + destination_pod_dir = os.path.join(destination_pods_dir, pod) + os.mkdir(destination_pod_dir) if global_options(ctx).debug: print(f"extra config dirs: {extra_config_dirs}") _fixup_pod_file(parsed_pod_file, parsed_spec, destination_compose_dir) @@ -322,6 +340,12 @@ def create(ctx, spec_file, deployment_dir, network_dir, initial_peers): # If the same config dir appears in multiple pods, it may already have been copied if not os.path.exists(destination_config_dir): copytree(source_config_dir, destination_config_dir) + # Copy the script files for the pod, if any + if 
pod_has_scripts(parsed_stack, pod): + destination_script_dir = os.path.join(destination_pod_dir, "scripts") + os.mkdir(destination_script_dir) + script_paths = get_pod_script_paths(parsed_stack, pod) + _copy_files_to_directory(script_paths, destination_script_dir) # Delegate to the stack's Python code # The deploy create command doesn't require a --stack argument so we need to insert the # stack member here. diff --git a/app/util.py b/app/util.py index 9d9eaa33..a25aacdb 100644 --- a/app/util.py +++ b/app/util.py @@ -13,6 +13,7 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . +from decouple import config import os.path import sys import ruamel.yaml @@ -37,6 +38,16 @@ def get_stack_file_path(stack): return stack_file_path +def get_dev_root_path(ctx): + if ctx and ctx.local_stack: + # TODO: This code probably doesn't work + dev_root_path = os.getcwd()[0:os.getcwd().rindex("stack-orchestrator")] + print(f'Local stack dev_root_path (CERC_REPO_BASE_DIR) overridden to: {dev_root_path}') + else: + dev_root_path = os.path.expanduser(config("CERC_REPO_BASE_DIR", default="~/cerc")) + return dev_root_path + + # Caller can pass either the name of a stack, or a path to a stack file def get_parsed_stack_config(stack): stack_file_path = stack if isinstance(stack, os.PathLike) else get_stack_file_path(stack) @@ -56,6 +67,68 @@ def get_parsed_stack_config(stack): sys.exit(1) +def get_pod_list(parsed_stack): + # Handle both old and new format + pods = parsed_stack["pods"] + if type(pods[0]) is str: + result = pods + else: + result = [] + for pod in pods: + result.append(pod["name"]) + return result + + +def get_plugin_code_path(stack): + parsed_stack = get_parsed_stack_config(stack) + pods = parsed_stack["pods"] + # TODO: Hack + pod = pods[0] + if type(pod) is str: + result = get_stack_file_path(stack).parent + else: + pod_root_dir = os.path.join(get_dev_root_path(None), pod["repository"].split("/")[-1], 
pod["path"]) + result = Path(os.path.join(pod_root_dir, "stack")) + return result + + +def get_pod_file_path(parsed_stack, pod_name: str): + pods = parsed_stack["pods"] + if type(pods[0]) is str: + result = os.path.join(get_compose_file_dir(), f"docker-compose-{pod_name}.yml") + else: + for pod in pods: + if pod["name"] == pod_name: + pod_root_dir = os.path.join(get_dev_root_path(None), pod["repository"].split("/")[-1], pod["path"]) + result = os.path.join(pod_root_dir, "docker-compose.yml") + return result + + +def get_pod_script_paths(parsed_stack, pod_name: str): + pods = parsed_stack["pods"] + result = [] + if not type(pods[0]) is str: + for pod in pods: + if pod["name"] == pod_name: + pod_root_dir = os.path.join(get_dev_root_path(None), pod["repository"].split("/")[-1], pod["path"]) + if "pre_start_command" in pod: + result.append(os.path.join(pod_root_dir, pod["pre_start_command"])) + if "post_start_command" in pod: + result.append(os.path.join(pod_root_dir, pod["post_start_command"])) + return result + + +def pod_has_scripts(parsed_stack, pod_name: str): + pods = parsed_stack["pods"] + if type(pods[0]) is str: + result = False + else: + for pod in pods: + if pod["name"] == pod_name: + result = "pre_start_command" in pod or "post_start_command" in pod + return result + + def get_compose_file_dir(): # TODO: refactor to use common code with deploy command # See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure