diff --git a/stack_orchestrator/constants.py b/stack_orchestrator/constants.py
index 54cfe355..596b0c1b 100644
--- a/stack_orchestrator/constants.py
+++ b/stack_orchestrator/constants.py
@@ -13,12 +13,16 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
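+# Prefix prepended to generated cluster (compose project) names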
+cluster_name_prefix = "laconic-"
stack_file_name = "stack.yml"
spec_file_name = "spec.yml"
config_file_name = "config.env"
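+# Name of the per-deployment file that records the generated cluster id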
+deployment_file_name = "deployment.yml"
+compose_dir_name = "compose"
compose_deploy_type = "compose"
k8s_kind_deploy_type = "k8s-kind"
k8s_deploy_type = "k8s"
+cluster_id_key = "cluster-id"
kube_config_key = "kube-config"
deploy_to_key = "deploy-to"
network_key = "network"
diff --git a/stack_orchestrator/deploy/deploy.py b/stack_orchestrator/deploy/deploy.py
index cd94c0a6..d1b64743 100644
--- a/stack_orchestrator/deploy/deploy.py
+++ b/stack_orchestrator/deploy/deploy.py
@@ -24,6 +24,7 @@ from importlib import resources
import subprocess
import click
from pathlib import Path
+from stack_orchestrator import constants
from stack_orchestrator.opts import opts
from stack_orchestrator.util import include_exclude_check, get_parsed_stack_config, global_options2, get_dev_root_path
from stack_orchestrator.deploy.deployer import Deployer, DeployerException
@@ -71,6 +72,9 @@ def create_deploy_context(
cluster,
env_file,
deploy_to) -> DeployCommandContext:
+ # Extract the cluster name from the deployment, if we have one
+ if deployment_context and cluster is None:
+ cluster = deployment_context.get_cluster_id()
cluster_context = _make_cluster_context(global_context, stack, include, exclude, cluster, env_file)
deployer = getDeployer(deploy_to, deployment_context, compose_files=cluster_context.compose_files,
compose_project_name=cluster_context.cluster,
@@ -260,13 +264,13 @@ def _make_default_cluster_name(deployment, compose_dir, stack, include, exclude)
path = os.path.realpath(os.path.abspath(compose_dir))
else:
path = "internal"
- unique_cluster_descriptor = f"{path},{stack},{include},{exclude}"
- if opts.o.debug:
- print(f"pre-hash descriptor: {unique_cluster_descriptor}")
- hash = hashlib.md5(unique_cluster_descriptor.encode()).hexdigest()[:16]
- cluster = f"laconic-{hash}"
- if opts.o.debug:
- print(f"Using cluster name: {cluster}")
+ unique_cluster_descriptor = f"{path},{stack},{include},{exclude}"
+ if opts.o.debug:
+ print(f"pre-hash descriptor: {unique_cluster_descriptor}")
+ hash = hashlib.md5(unique_cluster_descriptor.encode()).hexdigest()[:16]
+ cluster = f"{constants.cluster_name_prefix}{hash}"
+ if opts.o.debug:
+ print(f"Using cluster name: {cluster}")
return cluster
@@ -288,6 +292,8 @@ def _make_cluster_context(ctx, stack, include, exclude, cluster, env_file):
if cluster is None:
cluster = _make_default_cluster_name(deployment, compose_dir, stack, include, exclude)
+ else:
+ _make_default_cluster_name(deployment, compose_dir, stack, include, exclude)
# See: https://stackoverflow.com/a/20885799/1701505
from stack_orchestrator import data
diff --git a/stack_orchestrator/deploy/deployment.py b/stack_orchestrator/deploy/deployment.py
index 8d74a62d..cc70519e 100644
--- a/stack_orchestrator/deploy/deployment.py
+++ b/stack_orchestrator/deploy/deployment.py
@@ -52,7 +52,7 @@ def make_deploy_context(ctx) -> DeployCommandContext:
context: DeploymentContext = ctx.obj
stack_file_path = context.get_stack_file()
env_file = context.get_env_file()
- cluster_name = context.get_cluster_name()
+ cluster_name = context.get_cluster_id()
if constants.deploy_to_key in context.spec.obj:
deployment_type = context.spec.obj[constants.deploy_to_key]
else:
diff --git a/stack_orchestrator/deploy/deployment_context.py b/stack_orchestrator/deploy/deployment_context.py
index cbee4151..27e32812 100644
--- a/stack_orchestrator/deploy/deployment_context.py
+++ b/stack_orchestrator/deploy/deployment_context.py
@@ -14,15 +14,19 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+import hashlib
+import os
from pathlib import Path
from stack_orchestrator import constants
+from stack_orchestrator.util import get_yaml
from stack_orchestrator.deploy.stack import Stack
from stack_orchestrator.deploy.spec import Spec
class DeploymentContext:
deployment_dir: Path
+ id: str
spec: Spec
stack: Stack
@@ -35,9 +39,14 @@ class DeploymentContext:
def get_env_file(self):
return self.deployment_dir.joinpath(constants.config_file_name)
- # TODO: implement me
- def get_cluster_name(self):
- return None
+ def get_deployment_file(self):
+ return self.deployment_dir.joinpath(constants.deployment_file_name)
+
+ def get_compose_dir(self):
+ return self.deployment_dir.joinpath(constants.compose_dir_name)
+
+ def get_cluster_id(self):
+ return self.id
def init(self, dir):
self.deployment_dir = dir
@@ -45,3 +54,16 @@ class DeploymentContext:
self.spec.init_from_file(self.get_spec_file())
self.stack = Stack(self.spec.obj["stack"])
self.stack.init_from_file(self.get_stack_file())
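+        # Read the cluster id persisted in deployment.yml, if the deployment has one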
+ deployment_file_path = self.get_deployment_file()
+ if deployment_file_path.exists():
+            with open(deployment_file_path, "r") as deployment_file:
+                obj = get_yaml().load(deployment_file)
+                self.id = obj[constants.cluster_id_key]
+ # Handle the case of a legacy deployment with no file
+ # Code below is intended to match the output from _make_default_cluster_name()
+ # TODO: remove when we no longer need to support legacy deployments
+ else:
+ path = os.path.realpath(os.path.abspath(self.get_compose_dir()))
+ unique_cluster_descriptor = f"{path},{self.get_stack_file()},None,None"
+ hash = hashlib.md5(unique_cluster_descriptor.encode()).hexdigest()[:16]
+ self.id = f"{constants.cluster_name_prefix}{hash}"
diff --git a/stack_orchestrator/deploy/deployment_create.py b/stack_orchestrator/deploy/deployment_create.py
index 88ce0b2a..9eaea30c 100644
--- a/stack_orchestrator/deploy/deployment_create.py
+++ b/stack_orchestrator/deploy/deployment_create.py
@@ -20,6 +20,7 @@ from pathlib import Path
from typing import List
import random
from shutil import copy, copyfile, copytree
+from secrets import token_hex
import sys
from stack_orchestrator import constants
from stack_orchestrator.opts import opts
@@ -276,7 +277,7 @@ def init(ctx, config, config_file, kube_config, image_registry, output, map_port
# call it from other commands, bypassing the click decoration stuff
def init_operation(deploy_command_context, stack, deployer_type, config,
config_file, kube_config, image_registry, output, map_ports_to_host):
- yaml = get_yaml()
+
default_spec_file_content = call_stack_deploy_init(deploy_command_context)
spec_file_content = {"stack": stack, constants.deploy_to_key: deployer_type}
if deployer_type == "k8s":
@@ -311,8 +312,6 @@ def init_operation(deploy_command_context, stack, deployer_type, config,
new_config = config_file_variables
merged_config = {**new_config, **orig_config}
spec_file_content.update({"config": merged_config})
- if opts.o.debug:
- print(f"Creating spec file for stack: {stack} with content: {spec_file_content}")
ports = _get_mapped_ports(stack, map_ports_to_host)
spec_file_content.update({"network": {"ports": ports}})
@@ -324,8 +323,11 @@ def init_operation(deploy_command_context, stack, deployer_type, config,
volume_descriptors[named_volume] = f"./data/{named_volume}"
spec_file_content["volumes"] = volume_descriptors
+ if opts.o.debug:
+ print(f"Creating spec file for stack: {stack} with content: {spec_file_content}")
+
with open(output, "w") as output_file:
- yaml.dump(spec_file_content, output_file)
+ get_yaml().dump(spec_file_content, output_file)
def _write_config_file(spec_file: Path, config_env_file: Path):
@@ -351,6 +353,13 @@ def _copy_files_to_directory(file_paths: List[Path], directory: Path):
copy(path, os.path.join(directory, os.path.basename(path)))
+def _create_deployment_file(deployment_dir: Path):
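+    # Generate a random cluster id and record it in deployment.yml within the deployment dir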
+ deployment_file_path = deployment_dir.joinpath(constants.deployment_file_name)
+ cluster = f"{constants.cluster_name_prefix}{token_hex(8)}"
+ with open(deployment_file_path, "w") as output_file:
+ output_file.write(f"{constants.cluster_id_key}: {cluster}\n")
+
+
@click.command()
@click.option("--spec-file", required=True, help="Spec file to use to create this deployment")
@click.option("--deployment-dir", help="Create deployment files in this directory")
@@ -383,6 +392,7 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, netw
# Copy spec file and the stack file into the deployment dir
copyfile(spec_file, deployment_dir_path.joinpath(constants.spec_file_name))
copyfile(stack_file, deployment_dir_path.joinpath(os.path.basename(stack_file)))
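+    # Record a newly generated cluster id for this deployment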
+ _create_deployment_file(deployment_dir_path)
# Copy any config variables from the spec file into an env file suitable for compose
_write_config_file(spec_file, deployment_dir_path.joinpath(constants.config_file_name))
# Copy any k8s config file into the deployment dir