Fixes for external stack deployment (#851)

Fixes
- stack path resolution for `build`
- external stack path resolution for deployments
- "extra" config detection
- `deployment ports` command
- `version` command in a dist or source install (without build_tag.txt)
- `setup-repos`, so it won't fail when an existing repo is not on a branch or exact tag

Used in cerc-io/fixturenet-eth-stacks#14
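
The path-resolution fixes share one idea: the stack argument may name a built-in stack, point at an external stack directory, or point at a deployment directory, and the compose files live in a different place in each case. A minimal sketch of how the new helpers combine (the helper names come from this commit; the wrapper function is illustrative, not an actual call site):

# Illustrative wrapper only; mirrors the logic added to _make_cluster_context below.
from pathlib import Path
from stack_orchestrator.util import get_stack_path, stack_is_in_deployment


def resolve_compose_dir(stack) -> Path:
    stack_path = get_stack_path(stack)       # external dir as-is, else packaged data/stacks/<name>
    if stack_is_in_deployment(stack_path):   # a deployment dir carries a deployment marker file
        return stack_path.joinpath("compose")
    # otherwise compose files ship inside the installed package
    # (the real code locates them relative to its own module path)
    return Path(__file__).absolute().parent.joinpath("data", "compose")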

Reviewed-on: cerc-io/stack-orchestrator#851
Reviewed-by: David Boreham <dboreham@noreply.git.vdb.to>
Roy Crihfield 2024-07-09 15:37:35 +00:00
parent a2d6201be9
commit 36d4969b2d
8 changed files with 79 additions and 73 deletions


@@ -4,9 +4,11 @@ with open("README.md", "r", encoding="utf-8") as fh:
     long_description = fh.read()
 with open("requirements.txt", "r", encoding="utf-8") as fh:
     requirements = fh.read()
+with open("stack_orchestrator/data/version.txt", "r", encoding="utf-8") as fh:
+    version = fh.readlines()[-1].strip(" \n")
 setup(
     name='laconic-stack-orchestrator',
-    version='1.0.12',
+    version=version,
     author='Cerc',
     author_email='info@cerc.io',
     license='GNU Affero General Public License',


@@ -21,11 +21,6 @@ from stack_orchestrator.util import get_parsed_stack_config, warn_exit
 def get_containers_in_scope(stack: str):
-    # See: https://stackoverflow.com/a/20885799/1701505
-    from stack_orchestrator import data
-    with importlib.resources.open_text(data, "container-image-list.txt") as container_list_file:
-        all_containers = container_list_file.read().splitlines()
     containers_in_scope = []
     if stack:
         stack_config = get_parsed_stack_config(stack)
@@ -33,7 +28,10 @@ def get_containers_in_scope(stack: str):
             warn_exit(f"stack {stack} does not define any containers")
         containers_in_scope = stack_config['containers']
     else:
-        containers_in_scope = all_containers
+        # See: https://stackoverflow.com/a/20885799/1701505
+        from stack_orchestrator import data
+        with importlib.resources.open_text(data, "container-image-list.txt") as container_list_file:
+            containers_in_scope = container_list_file.read().splitlines()

     if opts.o.verbose:
         print(f'Containers: {containers_in_scope}')


@@ -26,8 +26,15 @@ import click
 from pathlib import Path
 from stack_orchestrator import constants
 from stack_orchestrator.opts import opts
-from stack_orchestrator.util import include_exclude_check, get_parsed_stack_config, global_options2, get_dev_root_path
-from stack_orchestrator.util import resolve_compose_file
+from stack_orchestrator.util import (
+    get_stack_path,
+    include_exclude_check,
+    get_parsed_stack_config,
+    global_options2,
+    get_dev_root_path,
+    stack_is_in_deployment,
+    resolve_compose_file,
+)
 from stack_orchestrator.deploy.deployer import Deployer, DeployerException
 from stack_orchestrator.deploy.deployer_factory import getDeployer
 from stack_orchestrator.deploy.deploy_types import ClusterContext, DeployCommandContext
@@ -60,6 +67,7 @@ def command(ctx, include, exclude, env_file, cluster, deploy_to):
     if deploy_to is None:
         deploy_to = "compose"

+    stack = get_stack_path(stack)
     ctx.obj = create_deploy_context(global_options2(ctx), None, stack, include, exclude, cluster, env_file, deploy_to)
     # Subcommand is executed now, by the magic of click
@@ -274,16 +282,12 @@ def _make_default_cluster_name(deployment, compose_dir, stack, include, exclude)
 # stack has to be either PathLike pointing to a stack yml file, or a string with the name of a known stack
 def _make_cluster_context(ctx, stack, include, exclude, cluster, env_file):
     dev_root_path = get_dev_root_path(ctx)

-    # TODO: huge hack, fix this
-    # If the caller passed a path for the stack file, then we know that we can get the compose files
-    # from the same directory
-    deployment = False
-    if isinstance(stack, os.PathLike):
-        compose_dir = stack.parent.joinpath("compose")
-        deployment = True
+    # TODO: hack, this should be encapsulated by the deployment context.
+    deployment = stack_is_in_deployment(stack)
+    if deployment:
+        compose_dir = stack.joinpath("compose")
     else:
         # See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure
         compose_dir = Path(__file__).absolute().parent.parent.joinpath("data", "compose")


@@ -50,15 +50,15 @@ def command(ctx, dir):
 def make_deploy_context(ctx) -> DeployCommandContext:
     context: DeploymentContext = ctx.obj
-    stack_file_path = context.get_stack_file()
     env_file = context.get_env_file()
     cluster_name = context.get_cluster_id()
     if constants.deploy_to_key in context.spec.obj:
         deployment_type = context.spec.obj[constants.deploy_to_key]
     else:
         deployment_type = constants.compose_deploy_type
-    return create_deploy_context(ctx.parent.parent.obj, context, stack_file_path, None, None, cluster_name, env_file,
-                                 deployment_type)
+    stack = context.deployment_dir
+    return create_deploy_context(ctx.parent.parent.obj, context, stack, None, None,
+                                 cluster_name, env_file, deployment_type)

 @command.command()
@@ -123,6 +123,7 @@ def push_images(ctx):
 @click.argument('extra_args', nargs=-1)  # help: command: port <service1> <service2>
 @click.pass_context
 def port(ctx, extra_args):
+    ctx.obj = make_deploy_context(ctx)
     port_operation(ctx, extra_args)


@@ -24,7 +24,7 @@ from secrets import token_hex
 import sys
 from stack_orchestrator import constants
 from stack_orchestrator.opts import opts
-from stack_orchestrator.util import (get_stack_file_path, get_parsed_deployment_spec, get_parsed_stack_config,
+from stack_orchestrator.util import (get_stack_path, get_parsed_deployment_spec, get_parsed_stack_config,
                                      global_options, get_yaml, get_pod_list, get_pod_file_path, pod_has_scripts,
                                      get_pod_script_paths, get_plugin_code_paths, error_exit, env_var_map_from_file,
                                      resolve_config_dir)
@@ -238,6 +238,11 @@ def _find_extra_config_dirs(parsed_pod_file, pod):
                         config_dir = host_path.split("/")[2]
                         if config_dir != pod:
                             config_dirs.add(config_dir)
+        for env_file in service_info.get("env_file", []):
+            if env_file.startswith("../config"):
+                config_dir = env_file.split("/")[2]
+                if config_dir != pod:
+                    config_dirs.add(config_dir)
     return config_dirs
@@ -454,7 +459,7 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, netw
     _check_volume_definitions(parsed_spec)
     stack_name = parsed_spec["stack"]
     deployment_type = parsed_spec[constants.deploy_to_key]
-    stack_file = get_stack_file_path(stack_name)
+    stack_file = get_stack_path(stack_name).joinpath(constants.stack_file_name)
     parsed_stack = get_parsed_stack_config(stack_name)
     if opts.o.debug:
         print(f"parsed spec: {parsed_spec}")
@@ -467,7 +472,7 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, netw
     os.mkdir(deployment_dir_path)
     # Copy spec file and the stack file into the deployment dir
     copyfile(spec_file, deployment_dir_path.joinpath(constants.spec_file_name))
-    copyfile(stack_file, deployment_dir_path.joinpath(os.path.basename(stack_file)))
+    copyfile(stack_file, deployment_dir_path.joinpath(constants.stack_file_name))
     _create_deployment_file(deployment_dir_path)
     # Copy any config varibles from the spec file into an env file suitable for compose
     _write_config_file(spec_file, deployment_dir_path.joinpath(constants.config_file_name))


@@ -20,14 +20,12 @@ import os
 import sys
 from decouple import config
 import git
+from git.exc import GitCommandError
 from tqdm import tqdm
 import click
 import importlib.resources
-from pathlib import Path
-import yaml
-from stack_orchestrator.constants import stack_file_name
 from stack_orchestrator.opts import opts
-from stack_orchestrator.util import include_exclude_check, stack_is_external, error_exit, warn_exit
+from stack_orchestrator.util import get_parsed_stack_config, include_exclude_check, error_exit, warn_exit

 class GitProgress(git.RemoteProgress):
@@ -81,9 +79,13 @@ def _get_repo_current_branch_or_tag(full_filesystem_repo_path):
     except TypeError:
         # This means that the current ref is not a branch, so possibly a tag
         # Let's try to get the tag
-        current_repo_branch_or_tag = git.Repo(full_filesystem_repo_path).git.describe("--tags", "--exact-match")
-        # Note that git is assymetric -- the tag you told it to check out may not be the one
-        # you get back here (if there are multiple tags associated with the same commit)
+        try:
+            current_repo_branch_or_tag = git.Repo(full_filesystem_repo_path).git.describe("--tags", "--exact-match")
+            # Note that git is asymmetric -- the tag you told it to check out may not be the one
+            # you get back here (if there are multiple tags associated with the same commit)
+        except GitCommandError:
+            # If there is no matching branch or tag checked out, just use the current SHA
+            current_repo_branch_or_tag = git.Repo(full_filesystem_repo_path).commit("HEAD").hexsha
     return current_repo_branch_or_tag, is_branch
@@ -102,7 +104,7 @@ def process_repo(pull, check_only, git_ssh, dev_root_path, branches_array, fully
         full_filesystem_repo_path
     ) if is_present else (None, None)
     if not opts.o.quiet:
-        present_text = f"already exists active {'branch' if is_branch else 'tag'}: {current_repo_branch_or_tag}" if is_present \
+        present_text = f"already exists active {'branch' if is_branch else 'ref'}: {current_repo_branch_or_tag}" if is_present \
            else 'Needs to be fetched'
         print(f"Checking: {full_filesystem_repo_path}: {present_text}")
     # Quick check that it's actually a repo
@@ -120,7 +122,7 @@ def process_repo(pull, check_only, git_ssh, dev_root_path, branches_array, fully
                 origin = git_repo.remotes.origin
                 origin.pull(progress=None if opts.o.quiet else GitProgress())
             else:
-                print("skipping pull because this repo checked out a tag")
+                print("skipping pull because this repo is not on a branch")
         else:
             print("(git pull skipped)")
     if not is_present:
@@ -222,19 +224,9 @@ def command(ctx, include, exclude, git_ssh, check_only, pull, branches):
     repos_in_scope = []
     if stack:
-        if stack_is_external(stack):
-            stack_file_path = Path(stack).joinpath(stack_file_name)
-        else:
-            # In order to be compatible with Python 3.8 we need to use this hack to get the path:
-            # See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure
-            stack_file_path = Path(__file__).absolute().parent.parent.joinpath("data", "stacks", stack, stack_file_name)
-        if not stack_file_path.exists():
-            error_exit(f"stack {stack} does not exist")
-        with stack_file_path:
-            stack_config = yaml.safe_load(open(stack_file_path, "r"))
+        stack_config = get_parsed_stack_config(stack)
         if "repos" not in stack_config or stack_config["repos"] is None:
             warn_exit(f"stack {stack} does not define any repositories")
-        else:
-            repos_in_scope = stack_config["repos"]
+        repos_in_scope = stack_config["repos"]
     else:
         repos_in_scope = all_repos
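
For reference, the branch/tag/SHA fallback above can be exercised on its own; a standalone sketch (describe_checkout and repo_path are illustrative names, not part of the change):

import git
from git.exc import GitCommandError


def describe_checkout(repo_path):
    # Returns (ref_name, is_branch) for an existing clone, mirroring
    # _get_repo_current_branch_or_tag in the diff above.
    repo = git.Repo(repo_path)
    try:
        # Normal case: HEAD is on a branch
        return repo.active_branch.name, True
    except TypeError:
        try:
            # Detached HEAD: prefer a tag pointing exactly at HEAD
            return repo.git.describe("--tags", "--exact-match"), False
        except GitCommandError:
            # No branch and no exact tag: fall back to the commit SHA
            return repo.commit("HEAD").hexsha, False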


@@ -20,6 +20,7 @@ import ruamel.yaml
 from pathlib import Path
 from dotenv import dotenv_values
 from typing import Mapping, Set, List
+from stack_orchestrator.constants import stack_file_name, deployment_file_name

 def include_exclude_check(s, include, exclude):
@@ -33,11 +34,14 @@ def include_exclude_check(s, include, exclude):
     return s not in exclude_list

-def get_stack_file_path(stack):
-    # In order to be compatible with Python 3.8 we need to use this hack to get the path:
-    # See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure
-    stack_file_path = Path(__file__).absolute().parent.joinpath("data", "stacks", stack, "stack.yml")
-    return stack_file_path
+def get_stack_path(stack):
+    if stack_is_external(stack):
+        stack_path = Path(stack)
+    else:
+        # In order to be compatible with Python 3.8 we need to use this hack to get the path:
+        # See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure
+        stack_path = Path(__file__).absolute().parent.joinpath("data", "stacks", stack)
+    return stack_path

 def get_dev_root_path(ctx):
@@ -52,21 +56,14 @@ def get_dev_root_path(ctx):
 # Caller can pass either the name of a stack, or a path to a stack file
 def get_parsed_stack_config(stack):
-    stack_file_path = stack if isinstance(stack, os.PathLike) else get_stack_file_path(stack)
-    try:
-        with stack_file_path:
-            stack_config = get_yaml().load(open(stack_file_path, "r"))
-            return stack_config
-    except FileNotFoundError as error:
-        # We try here to generate a useful diagnostic error
-        # First check if the stack directory is present
-        stack_directory = stack_file_path.parent
-        if os.path.exists(stack_directory):
-            print(f"Error: stack.yml file is missing from stack: {stack}")
-        else:
-            print(f"Error: stack: {stack} does not exist")
-        print(f"Exiting, error: {error}")
-        sys.exit(1)
+    stack_file_path = get_stack_path(stack).joinpath(stack_file_name)
+    if stack_file_path.exists():
+        return get_yaml().load(open(stack_file_path, "r"))
+    # We try here to generate a useful diagnostic error
+    # First check if the stack directory is present
+    if stack_file_path.parent.exists():
+        error_exit(f"stack.yml file is missing from stack: {stack}")
+    error_exit(f"stack {stack} does not exist")

 def get_pod_list(parsed_stack):
@@ -87,7 +84,7 @@ def get_plugin_code_paths(stack) -> List[Path]:
     result: Set[Path] = set()
     for pod in pods:
         if type(pod) is str:
-            result.add(get_stack_file_path(stack).parent)
+            result.add(get_stack_path(stack))
         else:
             pod_root_dir = os.path.join(get_dev_root_path(None), pod["repository"].split("/")[-1], pod["path"])
             result.add(Path(os.path.join(pod_root_dir, "stack")))
@@ -199,6 +196,10 @@ def stack_is_external(stack: str):
     return Path(stack).exists() if stack is not None else False

+def stack_is_in_deployment(stack: Path):
+    return stack.joinpath(deployment_file_name).exists()
+
+
 def get_yaml():
     # See: https://stackoverflow.com/a/45701840/1701505
     yaml = ruamel.yaml.YAML()


@@ -14,7 +14,7 @@
 # along with this program. If not, see <http:#www.gnu.org/licenses/>.

 import click
-import importlib.resources
+from importlib import resources, metadata

 @click.command()
@@ -24,8 +24,11 @@ def command(ctx):
     # See: https://stackoverflow.com/a/20885799/1701505
     from stack_orchestrator import data
-    with importlib.resources.open_text(data, "build_tag.txt") as version_file:
-        # TODO: code better version that skips comment lines
-        version_string = version_file.read().splitlines()[1]
+    if resources.is_resource(data, "build_tag.txt"):
+        with resources.open_text(data, "build_tag.txt") as version_file:
+            # TODO: code better version that skips comment lines
+            version_string = version_file.read().splitlines()[1]
+    else:
+        version_string = metadata.version("laconic-stack-orchestrator") + "-unknown"

-    print(f"Version: {version_string}")
+    print(version_string)
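
Taken together with the setup.py change above, the version command now has two sources: a build_tag.txt written at build time (preferred), and the installed package metadata, which setup.py populates from the last line of data/version.txt. A condensed sketch of the resulting lookup (assumes the same data-package layout shown in the diffs; not the literal implementation):

from importlib import resources, metadata

from stack_orchestrator import data


def resolve_version() -> str:
    if resources.is_resource(data, "build_tag.txt"):
        with resources.open_text(data, "build_tag.txt") as f:
            # build_tag.txt: first line is assumed to be a comment, second line the tag
            return f.read().splitlines()[1]
    # dist/source installs without build_tag.txt: report the package version
    return metadata.version("laconic-stack-orchestrator") + "-unknown"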