diff --git a/app/base.py b/app/base.py
index a7721141..abb1fa9d 100644
--- a/app/base.py
+++ b/app/base.py
@@ -15,7 +15,7 @@
 import os
 from abc import ABC, abstractmethod
-from .deploy import get_stack_status
+from app.deploy import get_stack_status
 from decouple import config
diff --git a/app/build_containers.py b/app/build_containers.py
index 3523dbc3..b3b6295e 100644
--- a/app/build_containers.py
+++ b/app/build_containers.py
@@ -27,8 +27,8 @@
 import subprocess
 import click
 import importlib.resources
 from pathlib import Path
-from .util import include_exclude_check, get_parsed_stack_config
-from .base import get_npm_registry_url
+from app.util import include_exclude_check, get_parsed_stack_config
+from app.base import get_npm_registry_url
 # TODO: find a place for this
 # epilog="Config provided either in .env or settings.ini or env vars: CERC_REPO_BASE_DIR (defaults to ~/cerc)"
@@ -67,7 +67,7 @@ def command(ctx, include, exclude, force_rebuild, extra_build_args):
         print('Dev root directory doesn\'t exist, creating')
     # See: https://stackoverflow.com/a/20885799/1701505
-    from . import data
+    from app import data
     with importlib.resources.open_text(data, "container-image-list.txt") as container_list_file:
         all_containers = container_list_file.read().splitlines()
diff --git a/app/build_npms.py b/app/build_npms.py
index d56b2774..6555ba91 100644
--- a/app/build_npms.py
+++ b/app/build_npms.py
@@ -25,8 +25,8 @@
 from decouple import config
 import click
 import importlib.resources
 from python_on_whales import docker, DockerException
-from .base import get_stack
-from .util import include_exclude_check, get_parsed_stack_config
+from app.base import get_stack
+from app.util import include_exclude_check, get_parsed_stack_config
 
 builder_js_image_name = "cerc/builder-js:local"
@@ -81,7 +81,7 @@ def command(ctx, include, exclude, force_rebuild, extra_build_args):
         os.makedirs(build_root_path)
     # See: https://stackoverflow.com/a/20885799/1701505
-    from . import data
+    from app import data
     with importlib.resources.open_text(data, "npm-package-list.txt") as package_list_file:
         all_packages = package_list_file.read().splitlines()
diff --git a/app/data/compose/docker-compose-mainnet-laconicd.yml b/app/data/compose/docker-compose-mainnet-laconicd.yml
index 4de68d7b..78d2cd2f 100644
--- a/app/data/compose/docker-compose-mainnet-laconicd.yml
+++ b/app/data/compose/docker-compose-mainnet-laconicd.yml
@@ -2,7 +2,7 @@ services:
   laconicd:
     restart: no
     image: cerc/laconicd:local
-    command: ["sh", "/docker-entrypoint-scripts.d/create-fixturenet.sh"]
+    command: ["/bin/sh", "-c", "while :; do sleep 600; done"]
     volumes:
       # The cosmos-sdk node's database directory:
       - laconicd-data:/root/.laconicd/data
diff --git a/app/data/stacks/mainnet-laconic/deploy/commands.py b/app/data/stacks/mainnet-laconic/deploy/commands.py
index 0ac4845f..a8a62bd7 100644
--- a/app/data/stacks/mainnet-laconic/deploy/commands.py
+++ b/app/data/stacks/mainnet-laconic/deploy/commands.py
@@ -13,45 +13,50 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
-import click
-import os
-from shutil import copyfile
-import sys
-from .util import get_stack_config_filename, get_parsed_deployment_spec
+from dataclasses import dataclass
+from app.util import get_yaml
+from app.stack_state import State
-default_spec_file_content = """stack: mainnet-laconic
-data_dir: /my/path
-node_name: my-node-name
+default_spec_file_content = """config:
+  node_moniker: my-node-name
+  chain_id: my-chain-id
 """
+init_help_text = """Add helpful text here on setting config variables.
+"""
-def make_default_deployment_dir():
-    return "deployment-001"
-@click.command()
-@click.option("--output", required=True, help="Write yaml spec file here")
-@click.pass_context
-def init(ctx, output):
-    with open(output, "w") as output_file:
-        output_file.write(default_spec_file_content)
+@dataclass
+class VolumeMapping:
+    host_path: str
+    container_path: str
-@click.command()
-@click.option("--spec-file", required=True, help="Spec file to use to create this deployment")
-@click.option("--deployment-dir", help="Create deployment files in this directory")
-@click.pass_context
-def create(ctx, spec_file, deployment_dir):
-    # This function fails with a useful error message if the file doens't exist
-    parsed_spec = get_parsed_deployment_spec(spec_file)
-    if ctx.debug:
-        print(f"parsed spec: {parsed_spec}")
-    if deployment_dir is None:
-        deployment_dir = make_default_deployment_dir()
-    if os.path.exists(deployment_dir):
-        print(f"Error: {deployment_dir} already exists")
-        sys.exit(1)
-    os.mkdir(deployment_dir)
-    # Copy spec file and the stack file into the deployment dir
-    copyfile(spec_file, os.path.join(deployment_dir, os.path.basename(spec_file)))
-    stack_file = get_stack_config_filename(parsed_spec.stack)
-    copyfile(stack_file, os.path.join(deployment_dir, os.path.basename(stack_file)))
+# In order to make this work, we need the ability to run the stack
+# In theory we can do this the same way we would run deploy up
+def run_container_command(ctx, container, command, mounts):
+    deploy_context = ctx.obj
+    pass
+
+
+def setup(ctx):
+    node_moniker = "dbdb-node"
+    chain_id = "laconic_81337-1"
+    mounts = [
+        VolumeMapping("./path", "~/.laconicd")
+    ]
+    output, status = run_container_command(ctx, "laconicd", f"laconicd init {node_moniker} --chain-id {chain_id}", mounts)
+
+
+def init(command_context):
+    print(init_help_text)
+    yaml = get_yaml()
+    return yaml.load(default_spec_file_content)
+
+
+def get_state(command_context):
+    print("Here we get state")
+    return State.CONFIGURED
+
+
+def change_state(command_context):
+    pass
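Note: run_container_command() above is still a stub, so the setup() call that unpacks output and status cannot work yet. A minimal sketch of one way it could be filled in, using python_on_whales, which the tool already depends on elsewhere in this diff; the cerc/<container>:local image naming and the (output, status) return convention are assumptions for illustration, not something this change establishes:

from python_on_whales import docker

def run_container_command(ctx, container, command, mounts):
    # Run a one-off command in the stack's container image with the requested
    # VolumeMapping bind mounts and hand back its output.
    volumes = [(mount.host_path, mount.container_path) for mount in mounts]
    output = docker.run(f"cerc/{container}:local", command.split(), volumes=volumes, remove=True)
    return output, 0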
diff --git a/app/data/stacks/mainnet-laconic/stack.yml b/app/data/stacks/mainnet-laconic/stack.yml
index b5e1f16c..51b0b8a2 100644
--- a/app/data/stacks/mainnet-laconic/stack.yml
+++ b/app/data/stacks/mainnet-laconic/stack.yml
@@ -25,7 +25,4 @@ containers:
 pods:
   - mainnet-laconicd
   - fixturenet-laconic-console
-config:
-  cli:
-    key: laconicd.mykey
-    address: laconicd.myaddress
+
diff --git a/app/deploy.py b/app/deploy.py
index 3f769f3e..41af70f6 100644
--- a/app/deploy.py
+++ b/app/deploy.py
@@ -26,9 +26,10 @@ import subprocess
 from python_on_whales import DockerClient, DockerException
 import click
 from pathlib import Path
-from .util import include_exclude_check, get_parsed_stack_config, global_options2
-from .deployment_create import create as deployment_create
-from .deployment_create import init as deployment_init
+from app.util import include_exclude_check, get_parsed_stack_config, global_options2
+from app.deployment_create import create as deployment_create
+from app.deployment_create import init as deployment_init
+from app.deployment_create import setup as deployment_setup
 
 
 class DeployCommandContext(object):
@@ -263,7 +264,7 @@ def _make_cluster_context(ctx, stack, include, exclude, cluster, env_file):
         print(f"Using cluster name: {cluster}")
     # See: https://stackoverflow.com/a/20885799/1701505
-    from . import data
+    from app import data
     with resources.open_text(data, "pod-list.txt") as pod_list_file:
         all_pods = pod_list_file.read().splitlines()
@@ -420,3 +421,4 @@ def _orchestrate_cluster_config(ctx, cluster_config, docker, container_exec_env)
 command.add_command(deployment_init)
 command.add_command(deployment_create)
+command.add_command(deployment_setup)
diff --git a/app/deployment.py b/app/deployment.py
index 921f69d7..e876b77e 100644
--- a/app/deployment.py
+++ b/app/deployment.py
@@ -17,8 +17,8 @@ import click
 from dataclasses import dataclass
 from pathlib import Path
 import sys
-from .deploy import up_operation, down_operation, ps_operation, port_operation, exec_operation, logs_operation, create_deploy_context
-from .util import global_options
+from app.deploy import up_operation, down_operation, ps_operation, port_operation, exec_operation, logs_operation, create_deploy_context
+from app.util import global_options
 
 
 @dataclass
@@ -128,15 +128,3 @@ def logs(ctx, extra_args):
 @click.pass_context
 def status(ctx):
     print(f"Context: {ctx.parent.obj}")
-
-
-
-#from importlib import resources, util
-# TODO: figure out how to do this dynamically
-#stack = "mainnet-laconic"
-#module_name = "commands"
-#spec = util.spec_from_file_location(module_name, "./app/data/stacks/" + stack + "/deploy/commands.py")
-#imported_stack = util.module_from_spec(spec)
-#spec.loader.exec_module(imported_stack)
-#command.add_command(imported_stack.init)
-#command.add_command(imported_stack.create)
diff --git a/app/deployment_create.py b/app/deployment_create.py
index 98aad990..ff06664a 100644
--- a/app/deployment_create.py
+++ b/app/deployment_create.py
@@ -14,20 +14,12 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
 import click
+from importlib import util
 import os
 from pathlib import Path
 from shutil import copyfile, copytree
 import sys
-import ruamel.yaml
-from .util import get_stack_file_path, get_parsed_deployment_spec, get_parsed_stack_config, global_options
-
-
-def _get_yaml():
-    # See: https://stackoverflow.com/a/45701840/1701505
-    yaml = ruamel.yaml.YAML()
-    yaml.preserve_quotes = True
-    yaml.indent(sequence=3, offset=1)
-    return yaml
+from app.util import get_stack_file_path, get_parsed_deployment_spec, get_parsed_stack_config, global_options, get_yaml
 
 
 def _make_default_deployment_dir():
@@ -47,7 +39,7 @@ def _get_named_volumes(stack):
     named_volumes = []
     parsed_stack = get_parsed_stack_config(stack)
     pods = parsed_stack["pods"]
-    yaml = _get_yaml()
+    yaml = get_yaml()
     for pod in pods:
         pod_file_path = os.path.join(_get_compose_file_dir(), f"docker-compose-{pod}.yml")
         parsed_pod_file = yaml.load(open(pod_file_path, "r"))
@@ -96,6 +88,29 @@ def _fixup_pod_file(pod, spec, compose_dir):
                 pod["volumes"][volume] = new_volume_spec
 
 
+def call_stack_deploy_init(stack):
+    # Link with the python file in the stack
+    # Call a function in it
+    # If no function found, return None
+    python_file_path = get_stack_file_path(stack).parent.joinpath("deploy", "commands.py")
+    spec = util.spec_from_file_location("commands", python_file_path)
+    imported_stack = util.module_from_spec(spec)
+    spec.loader.exec_module(imported_stack)
+    return imported_stack.init(None)
+
+
+# TODO: fold this with function above
+def call_stack_deploy_setup(stack):
+    # Link with the python file in the stack
+    # Call a function in it
+    # If no function found, return None
+    python_file_path = get_stack_file_path(stack).parent.joinpath("deploy", "commands.py")
+    spec = util.spec_from_file_location("commands", python_file_path)
+    imported_stack = util.module_from_spec(spec)
+    spec.loader.exec_module(imported_stack)
+    return imported_stack.setup(None)
+
+
 # Inspect the pod yaml to find config files referenced in subdirectories
 # other than the one associated with the pod
 def _find_extra_config_dirs(parsed_pod_file, pod):
@@ -118,17 +133,19 @@
 @click.option("--output", required=True, help="Write yaml spec file here")
 @click.pass_context
 def init(ctx, output):
-    yaml = _get_yaml()
+    yaml = get_yaml()
     stack = global_options(ctx).stack
     verbose = global_options(ctx).verbose
+    default_spec_file_content = call_stack_deploy_init(stack)
     spec_file_content = {"stack": stack}
+    spec_file_content.update(default_spec_file_content)
     if verbose:
         print(f"Creating spec file for stack: {stack}")
     named_volumes = _get_named_volumes(stack)
     if named_volumes:
         volume_descriptors = {}
         for named_volume in named_volumes:
-            volume_descriptors[named_volume] = f"../data/{named_volume}"
+            volume_descriptors[named_volume] = f"./data/{named_volume}"
         spec_file_content["volumes"] = volume_descriptors
     with open(output, "w") as output_file:
         yaml.dump(spec_file_content, output_file)
@@ -160,7 +177,7 @@ def create(ctx, spec_file, deployment_dir):
     destination_compose_dir = os.path.join(deployment_dir, "compose")
     os.mkdir(destination_compose_dir)
     data_dir = Path(__file__).absolute().parent.joinpath("data")
-    yaml = _get_yaml()
+    yaml = get_yaml()
     for pod in pods:
         pod_file_path = os.path.join(_get_compose_file_dir(), f"docker-compose-{pod}.yml")
         parsed_pod_file = yaml.load(open(pod_file_path, "r"))
@@ -180,3 +197,16 @@ def create(ctx, spec_file, deployment_dir):
         # If the same config dir appears in multiple pods, it may already have been copied
         if not os.path.exists(destination_config_dir):
             copytree(source_config_dir, destination_config_dir)
+
+
+@click.command()
+@click.option("--node-moniker", help="Help goes here")
+@click.option("--key-name", help="Help goes here")
+@click.option("--initialize-network", is_flag=True, default=False, help="Help goes here")
+@click.option("--join-network", is_flag=True, default=False, help="Help goes here")
+@click.option("--create-network", is_flag=True, default=False, help="Help goes here")
+@click.pass_context
+def setup(ctx, node_moniker, key_name, initialize_network, join_network, create_network):
+    stack = global_options(ctx).stack
+    call_stack_deploy_setup(stack)
+
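Note: call_stack_deploy_init() and call_stack_deploy_setup() above differ only in the function they invoke, and neither yet implements the "if no function found, return None" behaviour their comments promise. One possible shape for the folded helper the TODO asks for, sketched with the missing-file and missing-function guards added; this is illustrative only, not what the diff ships:

from importlib import util

from app.util import get_stack_file_path


def call_stack_deploy_command(stack, command_name, *args):
    # Load the stack's deploy/commands.py, if it exists, and call the named
    # function in it, returning None when the file or the function is absent.
    python_file_path = get_stack_file_path(stack).parent.joinpath("deploy", "commands.py")
    if not python_file_path.exists():
        return None
    spec = util.spec_from_file_location("commands", python_file_path)
    imported_stack = util.module_from_spec(spec)
    spec.loader.exec_module(imported_stack)
    command_func = getattr(imported_stack, command_name, None)
    return command_func(*args) if command_func else None

call_stack_deploy_init(stack) would then reduce to call_stack_deploy_command(stack, "init", None), and call_stack_deploy_setup(stack) to call_stack_deploy_command(stack, "setup", None).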
diff --git a/app/setup_repositories.py b/app/setup_repositories.py
index db0bd779..d275a986 100644
--- a/app/setup_repositories.py
+++ b/app/setup_repositories.py
@@ -25,7 +25,7 @@ import click
 import importlib.resources
 from pathlib import Path
 import yaml
-from .util import include_exclude_check
+from app.util import include_exclude_check
 
 
 class GitProgress(git.RemoteProgress):
@@ -227,7 +227,7 @@ def command(ctx, include, exclude, git_ssh, check_only, pull, branches, branches
         os.makedirs(dev_root_path)
     # See: https://stackoverflow.com/a/20885799/1701505
-    from . import data
+    from app import data
     with importlib.resources.open_text(data, "repository-list.txt") as repository_list_file:
         all_repos = repository_list_file.read().splitlines()
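Note: the init/setup/get_state/change_state hooks a stack's commands.py exposes, together with the State enum introduced in app/stack_state.py just below, imply a lifecycle the orchestrator could eventually consult before re-running setup. A hypothetical guard, written as it might sit inside a stack's deploy/commands.py; none of this wiring exists in the diff and the helper name is invented:

from app.stack_state import State


def setup_if_needed(command_context):
    # Skip setup when the stack already reports itself configured or running.
    if get_state(command_context) in (State.CONFIGURED, State.STARTED):
        print("Stack already configured, skipping setup")
        return
    setup(command_context)
    change_state(command_context)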
diff --git a/app/stack_state.py b/app/stack_state.py
new file mode 100644
index 00000000..830a47f7
--- /dev/null
+++ b/app/stack_state.py
@@ -0,0 +1,22 @@
+# Copyright © 2023 Cerc
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+from enum import Enum
+
+class State(Enum):
+    CREATED = 1
+    CONFIGURED = 2
+    STARTED = 3
+    STOPPED = 4
diff --git a/app/util.py b/app/util.py
index 69eda4af..2b12cfbc 100644
--- a/app/util.py
+++ b/app/util.py
@@ -15,7 +15,7 @@
 import os.path
 import sys
-import yaml
+import ruamel.yaml
 from pathlib import Path
@@ -42,7 +42,7 @@ def get_parsed_stack_config(stack):
     stack_file_path = stack if isinstance(stack, os.PathLike) else get_stack_file_path(stack)
     try:
         with stack_file_path:
-            stack_config = yaml.safe_load(open(stack_file_path, "r"))
+            stack_config = get_yaml().load(open(stack_file_path, "r"))
             return stack_config
     except FileNotFoundError as error:
         # We try here to generate a useful diagnostic error
@@ -60,7 +60,7 @@ def get_parsed_deployment_spec(spec_file):
     spec_file_path = Path(spec_file)
     try:
         with spec_file_path:
-            deploy_spec = yaml.safe_load(open(spec_file_path, "r"))
+            deploy_spec = get_yaml().load(open(spec_file_path, "r"))
             return deploy_spec
     except FileNotFoundError as error:
         # We try here to generate a useful diagnostic error
@@ -69,6 +69,14 @@ def get_parsed_deployment_spec(spec_file):
     sys.exit(1)
 
 
+def get_yaml():
+    # See: https://stackoverflow.com/a/45701840/1701505
+    yaml = ruamel.yaml.YAML()
+    yaml.preserve_quotes = True
+    yaml.indent(sequence=3, offset=1)
+    return yaml
+
+
 # TODO: this is fragile wrt to the subcommand depth
 # See also: https://github.com/pallets/click/issues/108
 def global_options(ctx):
diff --git a/app/version.py b/app/version.py
index 4194f24a..7af18dc1 100644
--- a/app/version.py
+++ b/app/version.py
@@ -22,7 +22,7 @@ def command(ctx):
     '''print tool version'''
     # See: https://stackoverflow.com/a/20885799/1701505
-    from . import data
+    from app import data
     with importlib.resources.open_text(data, "build_tag.txt") as version_file:
         # TODO: code better version that skips comment lines
         version_string = version_file.read().splitlines()[1]
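Note: the shared get_yaml() helper added to app/util.py returns a ruamel.yaml round-trip parser, so comments and quoting in stack and spec files survive a load/edit/dump cycle, which the previous yaml.safe_load() calls discarded. A small usage sketch; the file name and the edit are illustrative only:

from app.util import get_yaml

yaml = get_yaml()
with open("stack.yml", "r") as stack_file:
    stack = yaml.load(stack_file)
stack["pods"].append("my-new-pod")  # round-trip mode keeps surrounding comments intact
with open("stack.yml", "w") as stack_file:
    yaml.dump(stack, stack_file)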