From 1ca7c96daf5f18673ae47bd0aa4e4d7721a45de1 Mon Sep 17 00:00:00 2001
From: David Boreham
Date: Tue, 27 Jun 2023 13:46:32 -0600
Subject: [PATCH] Volume processing

---
 app/deployment.py        | 42 ++++++++++++++++--------------
 app/deployment_create.py | 56 ++++++++++++++++++++++++++++++----------
 2 files changed, 66 insertions(+), 32 deletions(-)

diff --git a/app/deployment.py b/app/deployment.py
index d75e792b..fa2236aa 100644
--- a/app/deployment.py
+++ b/app/deployment.py
@@ -20,10 +20,12 @@ import sys
 from .deploy import up_operation, down_operation, ps_operation, port_operation, exec_operation, logs_operation, create_deploy_context
 from .util import global_options
 
+
 @dataclass
 class DeploymentContext:
     dir: Path
 
+
 @click.group()
 @click.option("--dir", required=True, help="path to deployment directory")
 @click.pass_context
@@ -50,6 +52,7 @@ def make_deploy_context(ctx):
     # TODO: add cluster name and env file here
     return create_deploy_context(ctx.parent.parent.obj, stack_file_path, None, None, None, None)
 
+
 @command.command()
 @click.argument('extra_args', nargs=-1) # help: command: up
 @click.pass_context
@@ -59,12 +62,31 @@ def up(ctx, extra_args):
     up_operation(ctx, services_list)
 
 
+# start is the preferred alias for up
+@command.command()
+@click.argument('extra_args', nargs=-1) # help: command: up
+@click.pass_context
+def start(ctx, extra_args):
+    ctx.obj = make_deploy_context(ctx)
+    services_list = list(extra_args) or None
+    up_operation(ctx, services_list)
+
+
 @command.command()
 @click.argument('extra_args', nargs=-1) # help: command: down
 @click.pass_context
 def down(ctx, extra_args):
     # Get the stack config file name
-    stack_file_path = ctx.obj.dir.joinpath("stack.yml")
+    # TODO: add cluster name and env file here
+    ctx.obj = make_deploy_context(ctx)
+    down_operation(ctx, extra_args, None)
+
+
+# stop is the preferred alias for down
+@command.command()
+@click.argument('extra_args', nargs=-1) # help: command: down
+@click.pass_context
+def stop(ctx, extra_args):
     # TODO: add cluster name and env file here
     ctx.obj = make_deploy_context(ctx)
     down_operation(ctx, extra_args, None)
@@ -77,13 +99,6 @@ def ps(ctx):
     ps_operation(ctx)
 
 
-@command.command()
-@click.pass_context
-def logs(ctx):
-    ctx.obj = make_deploy_context(ctx)
-    print(f"Context: {ctx.parent.obj}")
-
-
 @command.command()
 @click.argument('extra_args', nargs=-1) # help: command: port
 @click.pass_context
@@ -107,23 +122,12 @@ def logs(ctx, extra_args):
     logs_operation(ctx, extra_args)
 
 
-@command.command()
-@click.pass_context
-def task(ctx):
-    print(f"Context: {ctx.parent.obj}")
-
-
 @command.command()
 @click.pass_context
 def status(ctx):
     print(f"Context: {ctx.parent.obj}")
 
 
-@command.command()
-@click.pass_context
-def reset(ctx):
-    ctx.obj = create_deploy_context(ctx.parent.parent.obj, stack_file_path, None, None, None, None)
-
 #from importlib import resources, util
 
 # TODO: figure out how to do this dynamically
diff --git a/app/deployment_create.py b/app/deployment_create.py
index c7aa1aa1..556b992c 100644
--- a/app/deployment_create.py
+++ b/app/deployment_create.py
@@ -18,23 +18,56 @@ import os
 from pathlib import Path
 from shutil import copyfile, copytree
 import sys
+import yaml
 from .util import get_stack_file_path, get_parsed_deployment_spec, get_parsed_stack_config, global_options
 
-default_spec_file_content = """stack: mainnet-laconic
-data_dir: /my/path
-node_name: my-node-name
-"""
-
 
-def make_default_deployment_dir():
+def _make_default_deployment_dir():
     return "deployment-001"
 
+
+def _get_compose_file_dir():
+    # TODO: refactor to use common code with deploy command
+    # See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure
+    data_dir = Path(__file__).absolute().parent.joinpath("data")
+    source_compose_dir = data_dir.joinpath("compose")
+    return source_compose_dir
+
+
+def _get_named_volumes(stack):
+    # Parse the compose files looking for named volumes
+    named_volumes = []
+    parsed_stack = get_parsed_stack_config(stack)
+    pods = parsed_stack["pods"]
+    for pod in pods:
+        pod_file_path = os.path.join(_get_compose_file_dir(), f"docker-compose-{pod}.yml")
+        parsed_pod_file = yaml.safe_load(open(pod_file_path, "r"))
+        if "volumes" in parsed_pod_file:
+            volumes = parsed_pod_file["volumes"]
+            for volume in volumes.keys():
+                # Volume definition looks like:
+                # 'laconicd-data': None
+                named_volumes.append(volume)
+    return named_volumes
+
+
 @click.command()
 @click.option("--output", required=True, help="Write yaml spec file here")
 @click.pass_context
 def init(ctx, output):
+    stack = global_options(ctx).stack
+    verbose = global_options(ctx).verbose
+    spec_file_content = {"stack": stack}
+    if verbose:
+        print(f"Creating spec file for stack: {stack}")
+    named_volumes = _get_named_volumes(stack)
+    if named_volumes:
+        volume_descriptors = {}
+        for named_volume in named_volumes:
+            volume_descriptors[named_volume] = f"./data/{named_volume}"
+        spec_file_content["volumes"] = volume_descriptors
     with open(output, "w") as output_file:
-        output_file.write(default_spec_file_content)
+        yaml.dump(spec_file_content, output_file)
 
 
 @click.command()
@@ -50,7 +83,7 @@ def create(ctx, spec_file, deployment_dir):
     if global_options(ctx).debug:
         print(f"parsed spec: {parsed_spec}")
     if deployment_dir is None:
-        deployment_dir = make_default_deployment_dir()
+        deployment_dir = _make_default_deployment_dir()
     if os.path.exists(deployment_dir):
         print(f"Error: {deployment_dir} already exists")
         sys.exit(1)
@@ -60,14 +93,11 @@ def create(ctx, spec_file, deployment_dir):
     copyfile(stack_file, os.path.join(deployment_dir, os.path.basename(stack_file)))
     # Copy the pod files into the deployment dir
     pods = parsed_stack['pods']
-    # TODO: refactor to use common code with deploy command
-    # See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure
-    data_dir = Path(__file__).absolute().parent.joinpath("data")
-    source_compose_dir = data_dir.joinpath("compose")
     destination_compose_dir = os.path.join(deployment_dir, "compose")
     os.mkdir(destination_compose_dir)
+    data_dir = Path(__file__).absolute().parent.joinpath("data")
     for pod in pods:
-        pod_file_path = os.path.join(source_compose_dir, f"docker-compose-{pod}.yml")
+        pod_file_path = os.path.join(_get_compose_file_dir(), f"docker-compose-{pod}.yml")
         copyfile(pod_file_path, os.path.join(destination_compose_dir, os.path.basename(pod_file_path)))
         # Copy the config files for the pod, if any
         source_config_dir = data_dir.joinpath("config", pod)
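
Note on the net effect of this patch (an illustrative sketch, not part of the commit): init now scans each pod's compose file for top-level named volumes and writes a volume-to-host-path mapping into the generated spec file, replacing the old fixed template. Assuming a hypothetical pod whose compose file declares the laconicd-data volume mentioned in the code comment above, running the init command with --output spec.yml would produce roughly the following (pod file name and stack name here are illustrative, not taken from the patch):

    # Hypothetical excerpt of compose/docker-compose-laconicd.yml,
    # read by _get_named_volumes(); only the keys of the top-level
    # "volumes" section are collected
    volumes:
      laconicd-data:

    # Spec file that yaml.dump() would then write to spec.yml
    stack: mainnet-laconic
    volumes:
      laconicd-data: ./data/laconicd-data

The create command can later use these per-volume paths when laying out the deployment directory, rather than relying on a hard-coded data_dir as in the removed default spec template.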