forked from cerc-io/stack-orchestrator
Rename functions to remove compose prefix (#604)
This commit is contained in:
parent 567dadef7d
commit 5b9b12a223
@@ -80,7 +80,7 @@ def up_operation(ctx, services_list, stay_attached=False):
             print(f"Running compose up with container_exec_env: {container_exec_env}, extra_args: {services_list}")
         for pre_start_command in cluster_context.pre_start_commands:
             _run_command(global_context, cluster_context.cluster, pre_start_command)
-        deploy_context.deployer.compose_up(detach=not stay_attached, services=services_list)
+        deploy_context.deployer.up(detach=not stay_attached, services=services_list)
         for post_start_command in cluster_context.post_start_commands:
             _run_command(global_context, cluster_context.cluster, post_start_command)
         _orchestrate_cluster_config(global_context, cluster_context.config, deploy_context.deployer, container_exec_env)
@@ -95,7 +95,7 @@ def down_operation(ctx, delete_volumes, extra_args_list):
         if extra_args_list:
             timeout_arg = extra_args_list[0]
         # Specify shutdown timeout (default 10s) to give services enough time to shutdown gracefully
-        ctx.obj.deployer.compose_down(timeout=timeout_arg, volumes=delete_volumes)
+        ctx.obj.deployer.down(timeout=timeout_arg, volumes=delete_volumes)


 def ps_operation(ctx):
@@ -103,7 +103,7 @@ def ps_operation(ctx):
     if not global_context.dry_run:
         if global_context.verbose:
             print("Running compose ps")
-        container_list = ctx.obj.deployer.compose_ps()
+        container_list = ctx.obj.deployer.ps()
         if len(container_list) > 0:
             print("Running containers:")
             for container in container_list:
@@ -134,7 +134,7 @@ def port_operation(ctx, extra_args):
         exposed_port = extra_args_list[1]
         if global_context.verbose:
             print(f"Running compose port {service_name} {exposed_port}")
-        mapped_port_data = ctx.obj.deployer.compose_port(service_name, exposed_port)
+        mapped_port_data = ctx.obj.deployer.port(service_name, exposed_port)
         print(f"{mapped_port_data[0]}:{mapped_port_data[1]}")

@@ -151,7 +151,7 @@ def exec_operation(ctx, extra_args):
         if global_context.verbose:
             print(f"Running compose exec {service_name} {command_to_exec}")
         try:
-            ctx.obj.deployer.compose_execute(service_name, command_to_exec, envs=container_exec_env)
+            ctx.obj.deployer.execute(service_name, command_to_exec, envs=container_exec_env)
         except DeployerException:
            print("container command returned error exit status")

@@ -163,7 +163,7 @@ def logs_operation(ctx, tail: int, follow: bool, extra_args: str):
         if global_context.verbose:
             print("Running compose logs")
         services_list = extra_args_list if extra_args_list is not None else []
-        logs_stream = ctx.obj.deployer.compose_logs(services=services_list, tail=tail, follow=follow, stream=True)
+        logs_stream = ctx.obj.deployer.logs(services=services_list, tail=tail, follow=follow, stream=True)
         for stream_type, stream_content in logs_stream:
             print(stream_content.decode("utf-8"), end="")

@@ -224,7 +224,7 @@ def get_stack_status(ctx, stack):
     # TODO: refactor to avoid duplicating this code above
     if ctx.verbose:
         print("Running compose ps")
-    container_list = deployer.compose_ps()
+    container_list = deployer.ps()
     if len(container_list) > 0:
         if ctx.debug:
             print(f"Container list from compose ps: {container_list}")
@@ -396,7 +396,7 @@ def _orchestrate_cluster_config(ctx, cluster_config, deployer, container_exec_env):
             # TODO: fix the script paths so they're consistent between containers
             source_value = None
             try:
-                source_value = deployer.compose_execute(pd.source_container,
+                source_value = deployer.execute(pd.source_container,
                                                         ["sh", "-c",
                                                          "sh /docker-entrypoint-scripts.d/export-"
                                                          f"{pd.source_variable}.sh"],
@@ -417,7 +417,7 @@ def _orchestrate_cluster_config(ctx, cluster_config, deployer, container_exec_env):
             if source_value:
                 if ctx.debug:
                     print(f"fetched source value: {source_value}")
-                destination_output = deployer.compose_execute(pd.destination_container,
+                destination_output = deployer.execute(pd.destination_container,
                                                               ["sh", "-c",
                                                                f"sh /scripts/import-{pd.destination_variable}.sh"
                                                                f" {source_value}"],
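For orientation (not part of this diff), a minimal sketch of how a call site drives the renamed interface; the deployer instance and the service and timeout values below are hypothetical:

# Hypothetical usage sketch: the call site no longer cares which backend it is driving.
deployer = ctx.obj.deployer                        # e.g. a DockerDeployer
deployer.up(detach=True, services=["db", "api"])   # formerly compose_up
for container in deployer.ps():                    # formerly compose_ps
    print(container)
deployer.down(timeout=30, volumes=False)           # formerly compose_down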
@@ -24,37 +24,37 @@ class DockerDeployer(Deployer):
         self.docker = DockerClient(compose_files=compose_files, compose_project_name=compose_project_name,
                                    compose_env_file=compose_env_file)

-    def compose_up(self, detach, services):
+    def up(self, detach, services):
         try:
             return self.docker.compose.up(detach=detach, services=services)
         except DockerException as e:
             raise DeployerException(e)

-    def compose_down(self, timeout, volumes):
+    def down(self, timeout, volumes):
         try:
             return self.docker.compose.down(timeout=timeout, volumes=volumes)
         except DockerException as e:
             raise DeployerException(e)

-    def compose_ps(self):
+    def ps(self):
         try:
             return self.docker.compose.ps()
         except DockerException as e:
             raise DeployerException(e)

-    def compose_port(self, service, private_port):
+    def port(self, service, private_port):
         try:
             return self.docker.compose.port(service=service, private_port=private_port)
         except DockerException as e:
             raise DeployerException(e)

-    def compose_execute(self, service, command, envs):
+    def execute(self, service, command, envs):
         try:
             return self.docker.compose.execute(service=service, command=command, envs=envs)
         except DockerException as e:
             raise DeployerException(e)

-    def compose_logs(self, services, tail, follow, stream):
+    def logs(self, services, tail, follow, stream):
         try:
             return self.docker.compose.logs(services=services, tail=tail, follow=follow, stream=stream)
         except DockerException as e:
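As a rough illustration (not part of this commit) of how the renamed DockerDeployer wrappers delegate to python_on_whales, here is a minimal sketch; it assumes the constructor takes the same keyword arguments it forwards to DockerClient, and the compose file and service names are placeholders:

# Hypothetical sketch: each renamed method is a thin wrapper over docker.compose.*
deployer = DockerDeployer(compose_files=["docker-compose.yml"],
                          compose_project_name="example",
                          compose_env_file=None)
try:
    deployer.up(detach=True, services=["web"])            # docker.compose.up
    print(deployer.port("web", 8080))                     # docker.compose.port
    deployer.execute("web", ["echo", "hello"], envs={})   # docker.compose.execute
except DeployerException as e:
    print(f"deployer operation failed: {e}")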
@@ -24,22 +24,22 @@ class K8sDeployer(Deployer):
         config.load_kube_config()
         self.client = client.CoreV1Api()

-    def compose_up(self, detach, services):
+    def up(self, detach, services):
         pass

-    def compose_down(self, timeout, volumes):
+    def down(self, timeout, volumes):
         pass

-    def compose_ps(self):
+    def ps(self):
         pass

-    def compose_port(self, service, private_port):
+    def port(self, service, private_port):
         pass

-    def compose_execute(self, service_name, command, envs):
+    def execute(self, service_name, command, envs):
         pass

-    def compose_logs(self, services, tail, follow, stream):
+    def logs(self, services, tail, follow, stream):
         pass

     def run(self, image, command, user, volumes, entrypoint=None):
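The K8sDeployer methods are renamed here but remain stubs. Purely as an illustrative sketch (not part of this commit), a ps() implementation could list pods through the CoreV1Api client constructed in __init__; mirroring the Docker deployer's error handling is an assumption:

    # Illustrative sketch only: one possible shape for K8sDeployer.ps().
    def ps(self):
        try:
            # list_pod_for_all_namespaces is provided by the kubernetes CoreV1Api client
            return self.client.list_pod_for_all_namespaces(watch=False).items
        except Exception as e:
            raise DeployerException(e)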
@@ -19,27 +19,27 @@ from abc import ABC, abstractmethod
 class Deployer(ABC):

     @abstractmethod
-    def compose_up(self, detach, services):
+    def up(self, detach, services):
         pass

     @abstractmethod
-    def compose_down(self, timeout, volumes):
+    def down(self, timeout, volumes):
         pass

     @abstractmethod
-    def compose_ps(self):
+    def ps(self):
         pass

     @abstractmethod
-    def compose_port(self, service, private_port):
+    def port(self, service, private_port):
         pass

     @abstractmethod
-    def compose_execute(self, service_name, command, envs):
+    def execute(self, service_name, command, envs):
         pass

     @abstractmethod
-    def compose_logs(self, services, tail, follow, stream):
+    def logs(self, services, tail, follow, stream):
         pass

     @abstractmethod
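A minimal sketch, assuming only the abstract interface above, of why the compose-free names matter: helpers can be written once against Deployer and work with any backend. The tail_service_logs helper below is hypothetical:

# Hypothetical helper written against the Deployer ABC, backend-agnostic by construction.
def tail_service_logs(deployer: Deployer, service: str, lines: int = 20):
    # logs() yields (stream_type, content) pairs, matching the usage in logs_operation above
    stream = deployer.logs(services=[service], tail=lines, follow=False, stream=True)
    for _stream_type, content in stream:
        print(content.decode("utf-8"), end="")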