Additional refactoring

David Boreham 2023-06-19 20:54:18 -06:00
parent 08cc9868b9
commit e320f8dc64
7 changed files with 223 additions and 44 deletions

View File

@@ -0,0 +1,30 @@
services:
  laconicd:
    restart: unless-stopped
    image: cerc/laconicd:local
    command: ["sh", "/docker-entrypoint-scripts.d/create-fixturenet.sh"]
    volumes:
      # The cosmos-sdk node's database directory:
      - laconicd-data:/root/.laconicd/data
      # TODO: look at folding these scripts into the container
      - ../config/fixturenet-laconicd/create-fixturenet.sh:/docker-entrypoint-scripts.d/create-fixturenet.sh
      - ../config/fixturenet-laconicd/export-mykey.sh:/docker-entrypoint-scripts.d/export-mykey.sh
      - ../config/fixturenet-laconicd/export-myaddress.sh:/docker-entrypoint-scripts.d/export-myaddress.sh
    # TODO: determine which of the ports below is really needed
    ports:
      - "6060"
      - "26657"
      - "26656"
      - "9473:9473"
      - "8545"
      - "8546"
      - "9090"
      - "9091"
      - "1317"
  cli:
    image: cerc/laconic-registry-cli:local
    volumes:
      - ../config/fixturenet-laconicd/registry-cli-config-template.yml:/registry-cli-config-template.yml
volumes:
  laconicd-data:
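For reference, a minimal sketch of driving a compose file like this from Python, mirroring how deploy.py (later in this commit) uses python-on-whales; the compose file path and project name here are assumptions, since the page doesn't show the new file's name:

    from python_on_whales import DockerClient

    # Hypothetical path and project name: the commit page doesn't show them.
    docker = DockerClient(compose_files=["docker-compose-fixturenet-laconicd.yml"],
                          compose_project_name="laconic-fixturenet")
    docker.compose.up(detach=True)      # start laconicd and the cli sidecar
    print(docker.compose.ps())          # list the running services
    docker.compose.down(volumes=True)   # tear down, removing laconicd-data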

View File

@@ -17,8 +17,10 @@ import click
 import os
 from shutil import copyfile
 import sys
+from .util import get_stack_config_filename, get_parsed_deployment_spec
 
-default_spec_file_content = """data_dir: /my/path
+default_spec_file_content = """stack: mainnet-laconic
+data_dir: /my/path
 node_name: my-node-name
 """
@@ -39,12 +41,17 @@ def init(ctx, output):
 @click.option("--deployment-dir", help="Create deployment files in this directory")
 @click.pass_context
 def create(ctx, spec_file, deployment_dir):
-    # TODO: check spec-file exists and is readable
+    # This function fails with a useful error message if the file doesn't exist
+    parsed_spec = get_parsed_deployment_spec(spec_file)
+    if ctx.debug:
+        print(f"parsed spec: {parsed_spec}")
     if deployment_dir is None:
         deployment_dir = make_default_deployment_dir()
     if os.path.exists(deployment_dir):
         print(f"Error: {deployment_dir} already exists")
         sys.exit(1)
     os.mkdir(deployment_dir)
-    # Copy spec file into the deployment dir
+    # Copy spec file and the stack file into the deployment dir
     copyfile(spec_file, os.path.join(deployment_dir, os.path.basename(spec_file)))
+    stack_file = get_stack_config_filename(parsed_spec.stack)
+    copyfile(stack_file, os.path.join(deployment_dir, os.path.basename(stack_file)))
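Note that get_parsed_deployment_spec (added to util.py below) returns a plain dict from yaml.safe_load, so the new default spec parses as follows (the final app/deployment_create.py below accordingly reads parsed_spec['stack']). A minimal sketch:

    import yaml

    default_spec_file_content = """stack: mainnet-laconic
    data_dir: /my/path
    node_name: my-node-name
    """

    spec = yaml.safe_load(default_spec_file_content)
    print(spec["stack"])   # 'mainnet-laconic' -- used to locate the stack file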

View File

@@ -24,9 +24,8 @@ containers:
   - cerc/laconic-console-host
 pods:
   - mainnet-laconicd
-  - laconic-console
+  - fixturenet-laconic-console
 config:
   cli:
     key: laconicd.mykey
     address: laconicd.myaddress
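For context, a sketch of how the fields of this stack file are consumed by _make_cluster_context and _orchestrate_cluster_config in deploy.py (later in this commit); the trimmed YAML string here just stands in for the file above:

    import yaml

    stack_yml = """
    pods:
      - mainnet-laconicd
      - fixturenet-laconic-console
    config:
      cli:
        key: laconicd.mykey
        address: laconicd.myaddress
    """

    stack_config = yaml.safe_load(stack_yml)
    pods_in_scope = stack_config['pods']
    cluster_config = stack_config['config'] if 'config' in stack_config else None
    print(pods_in_scope, cluster_config['cli']['key'])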

View File

@@ -21,12 +21,14 @@ import os
 import sys
 from dataclasses import dataclass
 from decouple import config
+from importlib import resources
 import subprocess
 from python_on_whales import DockerClient, DockerException
 import click
-from importlib import resources, util
 from pathlib import Path
-from .util import include_exclude_check, get_parsed_stack_config
+from .util import include_exclude_check, get_parsed_stack_config, global_options2
+from .deployment_create import create as deployment_create
+from .deployment_create import init as deployment_init
 
 
 class DeployCommandContext(object):
@@ -44,35 +46,44 @@ class DeployCommandContext(object):
 def command(ctx, include, exclude, env_file, cluster):
     '''deploy a stack'''
-    cluster_context = _make_cluster_context(ctx.obj, include, exclude, cluster, env_file)
+    if ctx.parent.obj.debug:
+        print(f"ctx.parent.obj: {ctx.parent.obj}")
+    ctx.obj = create_deploy_context(global_options2(ctx), global_options2(ctx).stack, include, exclude, cluster, env_file)
+    # Subcommand is executed now, by the magic of click
+
+
+def create_deploy_context(global_context, stack, include, exclude, cluster, env_file):
+    cluster_context = _make_cluster_context(global_context, stack, include, exclude, cluster, env_file)
     # See: https://gabrieldemarmiesse.github.io/python-on-whales/sub-commands/compose/
     docker = DockerClient(compose_files=cluster_context.compose_files, compose_project_name=cluster_context.cluster,
                           compose_env_file=cluster_context.env_file)
-    ctx.obj = DeployCommandContext(cluster_context, docker)
-    # Subcommand is executed now, by the magic of click
+    return DeployCommandContext(cluster_context, docker)
+
+
+def up_operation(ctx, services_list):
+    global_context = ctx.parent.parent.obj
+    deploy_context = ctx.obj
+    if not global_context.dry_run:
+        cluster_context = deploy_context.cluster_context
+        container_exec_env = _make_runtime_env(global_context)
+        for attr, value in container_exec_env.items():
+            os.environ[attr] = value
+        if global_context.verbose:
+            print(f"Running compose up with container_exec_env: {container_exec_env}, extra_args: {services_list}")
+        for pre_start_command in cluster_context.pre_start_commands:
+            _run_command(global_context, cluster_context.cluster, pre_start_command)
+        deploy_context.docker.compose.up(detach=True, services=services_list)
+        for post_start_command in cluster_context.post_start_commands:
+            _run_command(global_context, cluster_context.cluster, post_start_command)
+        _orchestrate_cluster_config(global_context, cluster_context.config, deploy_context.docker, container_exec_env)
 
 
 @command.command()
 @click.argument('extra_args', nargs=-1) # help: command: up <service1> <service2>
 @click.pass_context
 def up(ctx, extra_args):
-    global_context = ctx.parent.parent.obj
     extra_args_list = list(extra_args) or None
-    if not global_context.dry_run:
-        cluster_context = ctx.obj.cluster_context
-        container_exec_env = _make_runtime_env(global_context)
-        for attr, value in container_exec_env.items():
-            os.environ[attr] = value
-        if global_context.verbose:
-            print(f"Running compose up with container_exec_env: {container_exec_env}, extra_args: {extra_args_list}")
-        for pre_start_command in cluster_context.pre_start_commands:
-            _run_command(global_context, cluster_context.cluster, pre_start_command)
-        ctx.obj.docker.compose.up(detach=True, services=extra_args_list)
-        for post_start_command in cluster_context.post_start_commands:
-            _run_command(global_context, cluster_context.cluster, post_start_command)
-        _orchestrate_cluster_config(global_context, cluster_context.config, ctx.obj.docker, container_exec_env)
+    up_operation(ctx, extra_args_list)
 
 
 @command.command()
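The effect of the extraction above: up_operation now holds the compose-up logic once, and the click command is a thin wrapper, so the new deployment group (below) can reuse it. A stripped-down sketch of the pattern, with a stand-in body:

    import click

    def up_operation(ctx, services_list):
        # Stand-in body: the real version reads the global context from
        # ctx.parent.parent.obj and calls docker.compose.up().
        print(f"would start services: {services_list}")

    @click.group()
    @click.pass_context
    def command(ctx):
        pass

    @command.command()
    @click.argument('extra_args', nargs=-1)
    @click.pass_context
    def up(ctx, extra_args):
        up_operation(ctx, list(extra_args) or None)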
@@ -176,7 +187,7 @@ def get_stack_status(ctx, stack):
     ctx_copy = copy.copy(ctx)
     ctx_copy.stack = stack
-    cluster_context = _make_cluster_context(ctx_copy, None, None, None, None)
+    cluster_context = _make_cluster_context(ctx_copy, stack, None, None, None, None)
     docker = DockerClient(compose_files=cluster_context.compose_files, compose_project_name=cluster_context.cluster)
     # TODO: refactor to avoid duplicating this code above
     if ctx.verbose:
@@ -201,7 +212,8 @@ def _make_runtime_env(ctx):
     return container_exec_env
 
 
-def _make_cluster_context(ctx, include, exclude, cluster, env_file):
+# stack has to be either PathLike pointing to a stack yml file, or a string with the name of a known stack
+def _make_cluster_context(ctx, stack, include, exclude, cluster, env_file):
     if ctx.local_stack:
         dev_root_path = os.getcwd()[0:os.getcwd().rindex("stack-orchestrator")]
@@ -209,14 +221,20 @@ def _make_cluster_context(ctx, include, exclude, cluster, env_file):
     else:
         dev_root_path = os.path.expanduser(config("CERC_REPO_BASE_DIR", default="~/cerc"))
 
-    # See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure
-    compose_dir = Path(__file__).absolute().parent.joinpath("data", "compose")
+    # TODO: huge hack, fix this
+    # If the caller passed a path for the stack file, then we know that we can get the compose files
+    # from the same directory
+    if isinstance(stack, os.PathLike):
+        compose_dir = stack.parent
+    else:
+        # See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure
+        compose_dir = Path(__file__).absolute().parent.joinpath("data", "compose")
 
     if cluster is None:
         # Create default unique, stable cluster name from config file path and stack name if provided
         # TODO: change this to the config file path
         path = os.path.realpath(sys.argv[0])
-        unique_cluster_descriptor = f"{path},{ctx.stack},{include},{exclude}"
+        unique_cluster_descriptor = f"{path},{stack},{include},{exclude}"
         if ctx.debug:
             print(f"pre-hash descriptor: {unique_cluster_descriptor}")
         hash = hashlib.md5(unique_cluster_descriptor.encode()).hexdigest()
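A worked sketch of the stable cluster-name derivation above: the descriptor is hashed so that repeated invocations of the same command map to the same compose project. The example values are assumptions:

    import hashlib
    import os
    import sys

    path = os.path.realpath(sys.argv[0])
    stack, include, exclude = "mainnet-laconic", None, None   # example values
    unique_cluster_descriptor = f"{path},{stack},{include},{exclude}"
    hash = hashlib.md5(unique_cluster_descriptor.encode()).hexdigest()
    print(hash)   # stable across runs; the commit doesn't show how it is prefixed or truncated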
@@ -230,8 +248,8 @@ def _make_cluster_context(ctx, include, exclude, cluster, env_file):
         all_pods = pod_list_file.read().splitlines()
 
     pods_in_scope = []
-    if ctx.stack:
-        stack_config = get_parsed_stack_config(ctx.stack)
+    if stack:
+        stack_config = get_parsed_stack_config(stack)
         # TODO: syntax check the input here
         pods_in_scope = stack_config['pods']
         cluster_config = stack_config['config'] if 'config' in stack_config else None
@@ -379,11 +397,5 @@ def _orchestrate_cluster_config(ctx, cluster_config, docker, container_exec_env):
             print(f"destination output: {destination_output}")
 
 
-# TODO: figure out how to do this dynamically
-stack = "mainnet-laconic"
-module_name = "commands"
-spec = util.spec_from_file_location(module_name, "./app/data/stacks/" + stack + "/deploy/commands.py")
-imported_stack = util.module_from_spec(spec)
-spec.loader.exec_module(imported_stack)
-command.add_command(imported_stack.init)
-command.add_command(imported_stack.create)
+command.add_command(deployment_init)
+command.add_command(deployment_create)
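The removed lines are the dynamic per-stack command loading; the same pattern survives, commented out, in deployment.py below. For reference, the importlib pattern in isolation (it assumes the commands.py file exists at that path):

    from importlib import util

    # Assumes ./app/data/stacks/mainnet-laconic/deploy/commands.py exists.
    stack = "mainnet-laconic"
    spec = util.spec_from_file_location("commands", "./app/data/stacks/" + stack + "/deploy/commands.py")
    imported_stack = util.module_from_spec(spec)
    spec.loader.exec_module(imported_stack)
    # imported_stack.init and imported_stack.create are now ordinary click commands.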

View File

@@ -14,24 +14,47 @@
 # along with this program.  If not, see <http:#www.gnu.org/licenses/>.
 
 import click
+from dataclasses import dataclass
+from pathlib import Path
 import sys
+from .deploy import up_operation, create_deploy_context
+from .util import global_options
+
+
+@dataclass
+class DeploymentContext:
+    dir: Path
 
 
 @click.group()
 @click.option("--dir", required=True, help="path to deployment directory")
 @click.pass_context
-def command(ctx):
-    print(f"Context: {ctx.parent.obj}")
+def command(ctx, dir):
     # Check that --stack wasn't supplied
     if ctx.parent.obj.stack:
         print("Error: --stack can't be supplied with the deployment command")
         sys.exit(1)
+    # Check dir is valid
+    dir_path = Path(dir)
+    if not dir_path.exists():
+        print(f"Error: deployment directory {dir} does not exist")
+        sys.exit(1)
+    if not dir_path.is_dir():
+        print(f"Error: supplied deployment directory path {dir} exists but is a file not a directory")
+        sys.exit(1)
+    # Store the deployment context for subcommands
+    ctx.obj = DeploymentContext(dir_path)
 
 
 @command.command()
+@click.argument('extra_args', nargs=-1) # help: command: up <service1> <service2>
 @click.pass_context
-def up(ctx):
-    print(f"Context: {ctx.parent.obj}")
+def up(ctx, extra_args):
+    print(f"Context: {global_options(ctx)}")
+    # Get the stack config file name
+    stack_file_path = ctx.obj.dir.joinpath("stack.yml")
+    # TODO: add cluster name and env file here
+    ctx.obj = create_deploy_context(ctx.parent.parent.obj, stack_file_path, None, None, None, None)
+    services_list = list(extra_args) or None
+    up_operation(ctx, services_list)
 
 
 @command.command()
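The group-callback/ctx.obj handoff used above is standard click: the group validates its own options and stashes a typed context object that every subcommand can read. A minimal self-contained sketch:

    from dataclasses import dataclass
    from pathlib import Path

    import click

    @dataclass
    class DeploymentContext:
        dir: Path

    @click.group()
    @click.option("--dir", required=True)
    @click.pass_context
    def command(ctx, dir):
        ctx.obj = DeploymentContext(Path(dir))   # runs before any subcommand

    @command.command()
    @click.pass_context
    def up(ctx):
        print(f"deployment dir: {ctx.obj.dir}")  # set by the group callback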
@@ -62,3 +85,14 @@ def task(ctx):
 @click.pass_context
 def status(ctx):
     print(f"Context: {ctx.parent.obj}")
+
+#from importlib import resources, util
+# TODO: figure out how to do this dynamically
+#stack = "mainnet-laconic"
+#module_name = "commands"
+#spec = util.spec_from_file_location(module_name, "./app/data/stacks/" + stack + "/deploy/commands.py")
+#imported_stack = util.module_from_spec(spec)
+#spec.loader.exec_module(imported_stack)
+#command.add_command(imported_stack.init)
+#command.add_command(imported_stack.create)

app/deployment_create.py (new file, 67 lines added)
View File

@@ -0,0 +1,67 @@
# Copyright © 2022, 2023 Cerc
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http:#www.gnu.org/licenses/>.

import click
import os
from pathlib import Path
from shutil import copyfile
import sys
from .util import get_stack_config_path, get_parsed_deployment_spec, get_parsed_stack_config, global_options

default_spec_file_content = """stack: mainnet-laconic
data_dir: /my/path
node_name: my-node-name
"""


def make_default_deployment_dir():
    return "deployment-001"


@click.command()
@click.option("--output", required=True, help="Write yaml spec file here")
@click.pass_context
def init(ctx, output):
    with open(output, "w") as output_file:
        output_file.write(default_spec_file_content)


@click.command()
@click.option("--spec-file", required=True, help="Spec file to use to create this deployment")
@click.option("--deployment-dir", help="Create deployment files in this directory")
@click.pass_context
def create(ctx, spec_file, deployment_dir):
    # This function fails with a useful error message if the file doesn't exist
    parsed_spec = get_parsed_deployment_spec(spec_file)
    stack_file = get_stack_config_path(parsed_spec['stack'])
    parsed_stack = get_parsed_stack_config(stack_file)
    if global_options(ctx).debug:
        print(f"parsed spec: {parsed_spec}")
    if deployment_dir is None:
        deployment_dir = make_default_deployment_dir()
    if os.path.exists(deployment_dir):
        print(f"Error: {deployment_dir} already exists")
        sys.exit(1)
    os.mkdir(deployment_dir)
    # Copy spec file and the stack file into the deployment dir
    copyfile(spec_file, os.path.join(deployment_dir, os.path.basename(spec_file)))
    copyfile(stack_file, os.path.join(deployment_dir, os.path.basename(stack_file)))
    # Copy the pod files into the deployment dir
    pods = parsed_stack['pods']
    # TODO: refactor to use common code with deploy command
    # See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure
    compose_dir = Path(__file__).absolute().parent.joinpath("data", "compose")
    for pod in pods:
        pod_file_path = os.path.join(compose_dir, f"docker-compose-{pod}.yml")
        copyfile(pod_file_path, os.path.join(deployment_dir, os.path.basename(pod_file_path)))
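A hedged sketch of what init followed by create should leave on disk for the default spec; the pod names follow from the mainnet-laconic stack.yml shown earlier, and "spec.yml" stands in for whatever --output was given:

    import os

    # Assuming: init --output spec.yml, then create --spec-file spec.yml
    expected = {
        "spec.yml",                                       # copied spec file
        "stack.yml",                                      # copied stack config
        "docker-compose-mainnet-laconicd.yml",            # one compose file per pod
        "docker-compose-fixturenet-laconic-console.yml",
    }
    print(expected.issubset(set(os.listdir("deployment-001"))))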

View File

@@ -30,10 +30,16 @@ def include_exclude_check(s, include, exclude):
     return s not in exclude_list
 
 
-def get_parsed_stack_config(stack):
+def get_stack_config_path(stack):
     # In order to be compatible with Python 3.8 we need to use this hack to get the path:
     # See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure
     stack_file_path = Path(__file__).absolute().parent.joinpath("data", "stacks", stack, "stack.yml")
+    return stack_file_path
+
+
+# Caller can pass either the name of a stack, or a path to a stack file
+def get_parsed_stack_config(stack):
+    stack_file_path = stack if isinstance(stack, os.PathLike) else get_stack_config_path(stack)
     try:
         with stack_file_path:
             stack_config = yaml.safe_load(open(stack_file_path, "r"))
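The split above gives get_parsed_stack_config a dual calling convention, which the new deployment command relies on. A sketch of both call forms (the app.util module path is an assumption about the package layout):

    from pathlib import Path

    from app.util import get_parsed_stack_config

    by_name = get_parsed_stack_config("mainnet-laconic")                  # resolves under data/stacks/
    by_path = get_parsed_stack_config(Path("deployment-001/stack.yml"))   # as `deployment up` passes it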
@@ -48,3 +54,27 @@ def get_parsed_stack_config(stack):
             print(f"Error: stack: {stack} does not exist")
         print(f"Exiting, error: {error}")
         sys.exit(1)
+
+
+def get_parsed_deployment_spec(spec_file):
+    spec_file_path = Path(spec_file)
+    try:
+        with spec_file_path:
+            deploy_spec = yaml.safe_load(open(spec_file_path, "r"))
+            return deploy_spec
+    except FileNotFoundError as error:
+        # We try here to generate a useful diagnostic error
+        print(f"Error: spec file: {spec_file_path} does not exist")
+        print(f"Exiting, error: {error}")
+        sys.exit(1)
+
+
+# TODO: this is fragile wrt to the subcommand depth
+# See also: https://github.com/pallets/click/issues/108
+def global_options(ctx):
+    return ctx.parent.parent.obj
+
+
+# TODO: hack
+def global_options2(ctx):
+    return ctx.parent.obj