k8s refactor #595

Merged
telackey merged 9 commits from dboreham/k8s-refactor into main 2023-10-24 20:44:49 +00:00
4 changed files with 57 additions and 22 deletions
Showing only changes of commit e1cdbe76aa

View File

@@ -74,7 +74,7 @@ def up_operation(ctx, services_list, stay_attached=False):
print(f"Running compose up with container_exec_env: {container_exec_env}, extra_args: {services_list}")
for pre_start_command in cluster_context.pre_start_commands:
_run_command(global_context, cluster_context.cluster, pre_start_command)
-deploy_context.deployer.compose.up(detach=not stay_attached, services=services_list)
+deploy_context.deployer.compose_up(detach=not stay_attached, services=services_list)
for post_start_command in cluster_context.post_start_commands:
_run_command(global_context, cluster_context.cluster, post_start_command)
_orchestrate_cluster_config(global_context, cluster_context.config, deploy_context.deployer, container_exec_env)
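Every call site in this file now goes through the deployer object's compose_* wrapper methods instead of reaching into python_on_whales directly. A minimal sketch of the interface these call sites assume, with method names taken from the calls in this diff (the class name and signature details are assumptions; the actual abstract base in this commit, shown further below, is still a placeholder):

# Sketch only: the deployer surface implied by the call sites in this file.
# Method names come from the diff; everything else is an assumption.
from abc import ABC, abstractmethod


class ComposeDeployerInterface(ABC):
    @abstractmethod
    def compose_up(self, detach, services):
        ...

    @abstractmethod
    def compose_down(self, timeout, volumes):
        ...

    @abstractmethod
    def compose_ps(self):
        ...

    @abstractmethod
    def compose_port(self, service, private_port):
        ...

    @abstractmethod
    def compose_execute(self, service_name, command, tty, envs):
        ...

    @abstractmethod
    def compose_logs(self, services, tail, follow, stream):
        ...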
@@ -89,7 +89,7 @@ def down_operation(ctx, delete_volumes, extra_args_list):
if extra_args_list:
timeout_arg = extra_args_list[0]
# Specify shutdown timeout (default 10s) to give services enough time to shutdown gracefully
-ctx.obj.deployer.compose.down(timeout=timeout_arg, volumes=delete_volumes)
+ctx.obj.deployer.compose_down(timeout=timeout_arg, volumes=delete_volumes)
def ps_operation(ctx):
@@ -97,7 +97,7 @@ def ps_operation(ctx):
if not global_context.dry_run:
if global_context.verbose:
print("Running compose ps")
-container_list = ctx.obj.deployer.compose.ps()
+container_list = ctx.obj.deployer.compose_ps()
if len(container_list) > 0:
print("Running containers:")
for container in container_list:
@@ -128,7 +128,7 @@ def port_operation(ctx, extra_args):
exposed_port = extra_args_list[1]
if global_context.verbose:
print(f"Running compose port {service_name} {exposed_port}")
-mapped_port_data = ctx.obj.deployer.compose.port(service_name, exposed_port)
+mapped_port_data = ctx.obj.deployer.compose_port(service_name, exposed_port)
print(f"{mapped_port_data[0]}:{mapped_port_data[1]}")
@@ -145,7 +145,7 @@ def exec_operation(ctx, extra_args):
if global_context.verbose:
print(f"Running compose exec {service_name} {command_to_exec}")
try:
-ctx.obj.deployer.compose.execute(service_name, command_to_exec, envs=container_exec_env)
+ctx.obj.deployer.compose_execute(service_name, command_to_exec, envs=container_exec_env)
except DeployerException:
print("container command returned error exit status")
@@ -157,7 +157,7 @@ def logs_operation(ctx, tail: int, follow: bool, extra_args: str):
if global_context.verbose:
print("Running compose logs")
services_list = extra_args_list if extra_args_list is not None else []
-logs_stream = ctx.obj.deployer.compose.logs(services=services_list, tail=tail, follow=follow, stream=True)
+logs_stream = ctx.obj.deployer.compose_logs(services=services_list, tail=tail, follow=follow, stream=True)
for stream_type, stream_content in logs_stream:
print(stream_content.decode("utf-8"), end="")
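With stream=True, python_on_whales returns an iterator of (stream_type, bytes) tuples rather than a single string, which is what the decode loop above relies on. A small hypothetical helper built on the new compose_logs wrapper:

# Hypothetical helper (not part of this commit): gather a service's recent
# log output into one string via the deployer's compose_logs wrapper.
def collect_logs(deployer, service_name, tail=100):
    chunks = []
    for _stream_type, content in deployer.compose_logs(services=[service_name],
                                                       tail=tail,
                                                       follow=False,
                                                       stream=True):
        chunks.append(content.decode("utf-8"))
    return "".join(chunks)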
@@ -218,7 +218,7 @@ def get_stack_status(ctx, stack):
# TODO: refactor to avoid duplicating this code above
if ctx.verbose:
print("Running compose ps")
-container_list = deployer.compose.ps()
+container_list = deployer.compose_ps()
if len(container_list) > 0:
if ctx.debug:
print(f"Container list from compose ps: {container_list}")
@@ -390,12 +390,12 @@ def _orchestrate_cluster_config(ctx, cluster_config, deployer, container_exec_env):
# TODO: fix the script paths so they're consistent between containers
source_value = None
try:
-source_value = deployer.compose.execute(pd.source_container,
-["sh", "-c",
-"sh /docker-entrypoint-scripts.d/export-"
-f"{pd.source_variable}.sh"],
-tty=False,
-envs=container_exec_env)
+source_value = deployer.compose_execute(pd.source_container,
+["sh", "-c",
+"sh /docker-entrypoint-scripts.d/export-"
+f"{pd.source_variable}.sh"],
+tty=False,
+envs=container_exec_env)
except DeployerException as error:
if ctx.debug:
print(f"Docker exception reading config source: {error}")
@@ -411,12 +411,12 @@ def _orchestrate_cluster_config(ctx, cluster_config, deployer, container_exec_env):
if source_value:
if ctx.debug:
print(f"fetched source value: {source_value}")
-destination_output = deployer.compose.execute(pd.destination_container,
-["sh", "-c",
-f"sh /scripts/import-{pd.destination_variable}.sh"
-f" {source_value}"],
-tty=False,
-envs=container_exec_env)
+destination_output = deployer.compose_execute(pd.destination_container,
+["sh", "-c",
+f"sh /scripts/import-{pd.destination_variable}.sh"
+f" {source_value}"],
+tty=False,
+envs=container_exec_env)
waiting_for_data = False
if ctx.debug:
print(f"destination output: {destination_output}")

View File: app/deploy_docker.py

@@ -14,4 +14,28 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from python_on_whales import DockerClient, DockerException

from app.deployer import Deployer


class DockerDeployer(Deployer):
    def __init__(self, compose_files, compose_project_name, compose_env_file) -> None:
        self.docker = DockerClient(compose_files=compose_files, compose_project_name=compose_project_name,
                                   compose_env_file=compose_env_file)

    def compose_up(self, detach, services):
        return self.docker.compose.up(detach=detach, services=services)

    def compose_down(self, timeout, volumes):
        return self.docker.compose.down(timeout=timeout, volumes=volumes)

    def compose_ps(self):
        return self.docker.compose.ps()

    def compose_port(self, service, private_port):
        return self.docker.compose.port(service=service, private_port=private_port)

    def compose_execute(self, service_name, command, tty, envs):
        # Pass the service positionally and forward tty, since the call sites
        # in the deploy module invoke this with tty=False.
        return self.docker.compose.execute(service_name, command, tty=tty, envs=envs)

    def compose_logs(self, services, tail, follow, stream):
        return self.docker.compose.logs(services=services, tail=tail, follow=follow, stream=stream)
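A minimal usage sketch for the new wrapper class; the compose file, project name, and service names below are placeholders:

# Usage sketch: construct the Docker backend directly and drive a stack
# through the compose_* wrappers. All arguments here are placeholders.
from app.deploy_docker import DockerDeployer

deployer = DockerDeployer(compose_files=["docker-compose-example.yml"],
                          compose_project_name="laconic-example",
                          compose_env_file=None)
deployer.compose_up(detach=True, services=["db", "server"])
for container in deployer.compose_ps():
    print(container.name)
deployer.compose_down(timeout=None, volumes=True)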

View File: app/deploy_k8s.py

@@ -14,4 +14,9 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from kubernetes import client, config

from app.deployer import Deployer


class K8sDeployer(Deployer):
    def __init__(self) -> None:
        pass
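The Kubernetes side is only a stub in this commit, and the kubernetes imports are not used yet. Purely as a speculative sketch of where this could go, compose_ps might be backed by the cluster's pod list:

# Speculative sketch (not part of this commit): a pod-list-backed compose_ps
# using the official kubernetes client. Config loading and namespace handling
# are assumptions.
from kubernetes import client, config


class K8sDeployerSketch:
    def __init__(self, namespace="default") -> None:
        config.load_kube_config()  # reads the local ~/.kube/config
        self.core_api = client.CoreV1Api()
        self.namespace = namespace

    def compose_ps(self):
        # Rough analogue of docker compose ps: the pods in the target namespace
        return self.core_api.list_namespaced_pod(namespace=self.namespace).items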

View File: app/deployer.py

@@ -14,14 +14,20 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from abc import ABC, abstractmethod
+from app.deploy_k8s import K8sDeployer
+from app.deploy_docker import DockerDeployer
class Deployer(ABC):
    @abstractmethod
-    def (self, purchase):
+    def method(self, purchase):
        pass
-class DeployerException(Exception) {
+def getDeployer(compose_files, compose_project_name, compose_env_file):
+    return DockerDeployer(compose_files, compose_project_name, compose_env_file)
-}
+class DeployerException(Exception):
    pass
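With the factory in place, calling code can stay backend-agnostic, and switching to Kubernetes later should only require changing what the factory returns. A hypothetical sketch of that future switch (the deploy_to_k8s flag is an assumption; this commit always returns DockerDeployer and K8sDeployer is still empty):

# Hypothetical sketch: a future factory that can hand back the K8s backend.
# The deploy_to_k8s flag is an assumption, not part of this commit.
from app.deploy_docker import DockerDeployer
from app.deploy_k8s import K8sDeployer


def get_deployer_sketch(compose_files, compose_project_name, compose_env_file,
                        deploy_to_k8s=False):
    if deploy_to_k8s:
        return K8sDeployer()
    return DockerDeployer(compose_files, compose_project_name, compose_env_file)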