Volume processing

parent: fa5cad1533
commit: 1ca7c96daf
@@ -20,10 +20,12 @@ import sys
 from .deploy import up_operation, down_operation, ps_operation, port_operation, exec_operation, logs_operation, create_deploy_context
 from .util import global_options
+
+
 @dataclass
 class DeploymentContext:
     dir: Path
 
 
 @click.group()
 @click.option("--dir", required=True, help="path to deployment directory")
 @click.pass_context
@@ -50,6 +52,7 @@ def make_deploy_context(ctx):
     # TODO: add cluster name and env file here
     return create_deploy_context(ctx.parent.parent.obj, stack_file_path, None, None, None, None)
 
+
 @command.command()
 @click.argument('extra_args', nargs=-1) # help: command: up <service1> <service2>
 @click.pass_context
@@ -59,12 +62,31 @@ def up(ctx, extra_args):
     up_operation(ctx, services_list)
 
 
+# start is the preferred alias for up
+@command.command()
+@click.argument('extra_args', nargs=-1) # help: command: up <service1> <service2>
+@click.pass_context
+def start(ctx, extra_args):
+    ctx.obj = make_deploy_context(ctx)
+    services_list = list(extra_args) or None
+    up_operation(ctx, services_list)
+
+
 @command.command()
 @click.argument('extra_args', nargs=-1) # help: command: down <service1> <service2>
 @click.pass_context
 def down(ctx, extra_args):
     # Get the stack config file name
-    stack_file_path = ctx.obj.dir.joinpath("stack.yml")
+    # TODO: add cluster name and env file here
+    ctx.obj = make_deploy_context(ctx)
+    down_operation(ctx, extra_args, None)
+
+
+# stop is the preferred alias for down
+@command.command()
+@click.argument('extra_args', nargs=-1) # help: command: down <service1> <service2>
+@click.pass_context
+def stop(ctx, extra_args):
     # TODO: add cluster name and env file here
     ctx.obj = make_deploy_context(ctx)
     down_operation(ctx, extra_args, None)
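Since click has no built-in alias mechanism, the new start and stop commands are registered as separate commands whose bodies repeat up and down. A minimal, self-contained sketch of the same pattern, with a hypothetical do_up() standing in for up_operation() and its context plumbing:

# Sketch only: the alias pattern used above, not this module's real code.
import click


def do_up(services):
    # Stand-in for up_operation()
    print(f"starting: {services or 'all services'}")


@click.group()
def command():
    pass


@command.command()
@click.argument('extra_args', nargs=-1)
def up(extra_args):
    do_up(list(extra_args) or None)


# start is the preferred alias for up; since click offers no alias
# support, the command body is simply duplicated.
@command.command()
@click.argument('extra_args', nargs=-1)
def start(extra_args):
    do_up(list(extra_args) or None)


if __name__ == "__main__":
    command()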
@@ -77,13 +99,6 @@ def ps(ctx):
     ps_operation(ctx)
 
 
-@command.command()
-@click.pass_context
-def logs(ctx):
-    ctx.obj = make_deploy_context(ctx)
-    print(f"Context: {ctx.parent.obj}")
-
-
 @command.command()
 @click.argument('extra_args', nargs=-1) # help: command: port <service1> <service2>
 @click.pass_context
@@ -107,23 +122,12 @@ def logs(ctx, extra_args):
     logs_operation(ctx, extra_args)
 
 
-@command.command()
-@click.pass_context
-def task(ctx):
-    print(f"Context: {ctx.parent.obj}")
-
-
 @command.command()
 @click.pass_context
 def status(ctx):
     print(f"Context: {ctx.parent.obj}")
 
 
-@command.command()
-@click.pass_context
-def reset(ctx):
-    ctx.obj = create_deploy_context(ctx.parent.parent.obj, stack_file_path, None, None, None, None)
-
 
 #from importlib import resources, util
 # TODO: figure out how to do this dynamically
@@ -18,23 +18,56 @@ import os
 from pathlib import Path
 from shutil import copyfile, copytree
 import sys
+import yaml
 from .util import get_stack_file_path, get_parsed_deployment_spec, get_parsed_stack_config, global_options
 
-default_spec_file_content = """stack: mainnet-laconic
-data_dir: /my/path
-node_name: my-node-name
-"""
 
-def make_default_deployment_dir():
+def _make_default_deployment_dir():
     return "deployment-001"
 
 
+def _get_compose_file_dir():
+    # TODO: refactor to use common code with deploy command
+    # See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure
+    data_dir = Path(__file__).absolute().parent.joinpath("data")
+    source_compose_dir = data_dir.joinpath("compose")
+    return source_compose_dir
+
+
+def _get_named_volumes(stack):
+    # Parse the compose files looking for named volumes
+    named_volumes = []
+    parsed_stack = get_parsed_stack_config(stack)
+    pods = parsed_stack["pods"]
+    for pod in pods:
+        pod_file_path = os.path.join(_get_compose_file_dir(), f"docker-compose-{pod}.yml")
+        parsed_pod_file = yaml.safe_load(open(pod_file_path, "r"))
+        if "volumes" in parsed_pod_file:
+            volumes = parsed_pod_file["volumes"]
+            for volume in volumes.keys():
+                # Volume definition looks like:
+                # 'laconicd-data': None
+                named_volumes.append(volume)
+    return named_volumes
+
+
 @click.command()
 @click.option("--output", required=True, help="Write yaml spec file here")
 @click.pass_context
 def init(ctx, output):
+    stack = global_options(ctx).stack
+    verbose = global_options(ctx).verbose
+    spec_file_content = {"stack": stack}
+    if verbose:
+        print(f"Creating spec file for stack: {stack}")
+    named_volumes = _get_named_volumes(stack)
+    if named_volumes:
+        volume_descriptors = {}
+        for named_volume in named_volumes:
+            volume_descriptors[named_volume] = f"./data/{named_volume}"
+        spec_file_content["volumes"] = volume_descriptors
     with open(output, "w") as output_file:
-        output_file.write(default_spec_file_content)
+        yaml.dump(spec_file_content, output_file)
 
 
 @click.command()
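The reworked init no longer writes a hard-coded spec: it scans each pod's compose file for top-level named volumes and records a ./data/<volume> descriptor per volume. A self-contained sketch of that round trip, using made-up compose content instead of the stack's real data directory:

# Sketch of the volume-extraction logic above, run against an inline
# compose document; the service and image names are illustrative only.
import yaml

sample_compose = """
services:
  laconicd:
    image: cerc/laconicd:local
    volumes:
      - laconicd-data:/root/.laconicd
volumes:
  laconicd-data:
"""

parsed_pod_file = yaml.safe_load(sample_compose)
named_volumes = []
if "volumes" in parsed_pod_file:
    # Top-level volume definitions parse as: {'laconicd-data': None}
    for volume in parsed_pod_file["volumes"].keys():
        named_volumes.append(volume)

# Build the spec file content the way init() does
spec_file_content = {"stack": "mainnet-laconic"}
if named_volumes:
    spec_file_content["volumes"] = {v: f"./data/{v}" for v in named_volumes}

print(yaml.dump(spec_file_content))
# Prints:
# stack: mainnet-laconic
# volumes:
#   laconicd-data: ./data/laconicd-data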
@@ -50,7 +83,7 @@ def create(ctx, spec_file, deployment_dir):
     if global_options(ctx).debug:
         print(f"parsed spec: {parsed_spec}")
     if deployment_dir is None:
-        deployment_dir = make_default_deployment_dir()
+        deployment_dir = _make_default_deployment_dir()
     if os.path.exists(deployment_dir):
         print(f"Error: {deployment_dir} already exists")
         sys.exit(1)
@@ -60,14 +93,11 @@ def create(ctx, spec_file, deployment_dir):
     copyfile(stack_file, os.path.join(deployment_dir, os.path.basename(stack_file)))
     # Copy the pod files into the deployment dir
     pods = parsed_stack['pods']
-    # TODO: refactor to use common code with deploy command
-    # See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure
-    data_dir = Path(__file__).absolute().parent.joinpath("data")
-    source_compose_dir = data_dir.joinpath("compose")
     destination_compose_dir = os.path.join(deployment_dir, "compose")
     os.mkdir(destination_compose_dir)
+    data_dir = Path(__file__).absolute().parent.joinpath("data")
     for pod in pods:
-        pod_file_path = os.path.join(source_compose_dir, f"docker-compose-{pod}.yml")
+        pod_file_path = os.path.join(_get_compose_file_dir(), f"docker-compose-{pod}.yml")
         copyfile(pod_file_path, os.path.join(destination_compose_dir, os.path.basename(pod_file_path)))
         # Copy the config files for the pod, if any
         source_config_dir = data_dir.joinpath("config", pod)
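Net effect of the create changes: compose files are now located via the shared _get_compose_file_dir() helper, and data_dir is retained only for the per-pod config copy. Assuming a hypothetical stack with a single pod named laconicd, the resulting deployment directory would look roughly like:

deployment-001/
    stack.yml
    compose/
        docker-compose-laconicd.yml

plus any per-pod config files copied from data/config/<pod>.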