Gitea deployment (#568)
* First part of deployments for external repos
* Generate deployment dir
* Create empty config file
* Copy script files into deployment
* Run scripts in deployment
* Refactor
* Integrate external plugins
* Remove debug output
parent 5ec98ee9a1
commit 2486003361
@@ -2,7 +2,7 @@ version: "1.1"
 name: package-registry
 description: "Local Package Registry"
 repos:
-  - github.com/cerc-io/hosting
+  - git.vdb.to/cerc-io/hosting
   - gitea.com/gitea/act_runner
 containers:
   - cerc/act-runner

@@ -20,13 +20,12 @@ import copy
 import os
 import sys
 from dataclasses import dataclass
-from decouple import config
 from importlib import resources
 import subprocess
 from python_on_whales import DockerClient, DockerException
 import click
 from pathlib import Path
-from app.util import include_exclude_check, get_parsed_stack_config, global_options2
+from app.util import include_exclude_check, get_parsed_stack_config, global_options2, get_dev_root_path
 from app.deploy_types import ClusterContext, DeployCommandContext
 from app.deployment_create import create as deployment_create
 from app.deployment_create import init as deployment_init

@@ -235,17 +234,15 @@ def _make_runtime_env(ctx):
 # stack has to be either PathLike pointing to a stack yml file, or a string with the name of a known stack
 def _make_cluster_context(ctx, stack, include, exclude, cluster, env_file):
 
-    if ctx.local_stack:
-        dev_root_path = os.getcwd()[0:os.getcwd().rindex("stack-orchestrator")]
-        print(f'Local stack dev_root_path (CERC_REPO_BASE_DIR) overridden to: {dev_root_path}')
-    else:
-        dev_root_path = os.path.expanduser(config("CERC_REPO_BASE_DIR", default="~/cerc"))
+    dev_root_path = get_dev_root_path(ctx)
 
     # TODO: huge hack, fix this
     # If the caller passed a path for the stack file, then we know that we can get the compose files
     # from the same directory
+    deployment = False
     if isinstance(stack, os.PathLike):
         compose_dir = stack.parent.joinpath("compose")
+        deployment = True
     else:
         # See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure
         compose_dir = Path(__file__).absolute().parent.joinpath("data", "compose")

@@ -295,6 +292,16 @@ def _make_cluster_context(ctx, stack, include, exclude, cluster, env_file):
         if include_exclude_check(pod_name, include, exclude):
             if pod_repository is None or pod_repository == "internal":
                 compose_file_name = os.path.join(compose_dir, f"docker-compose-{pod_path}.yml")
             else:
-                pod_root_dir = os.path.join(dev_root_path, pod_repository.split("/")[-1], pod["path"])
-                compose_file_name = os.path.join(pod_root_dir, "docker-compose.yml")
+                if deployment:
+                    compose_file_name = os.path.join(compose_dir, "docker-compose.yml")
+                    pod_pre_start_command = pod["pre_start_command"]
+                    pod_post_start_command = pod["post_start_command"]
+                    script_dir = compose_dir.parent.joinpath("pods", pod_name, "scripts")
+                    if pod_pre_start_command is not None:
+                        pre_start_commands.append(os.path.join(script_dir, pod_pre_start_command))
+                    if pod_post_start_command is not None:
+                        post_start_commands.append(os.path.join(script_dir, pod_post_start_command))
+                else:
+                    pod_root_dir = os.path.join(dev_root_path, pod_repository.split("/")[-1], pod["path"])
+                    compose_file_name = os.path.join(pod_root_dir, "docker-compose.yml")

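To make the new branch above concrete: for an external pod, the compose file and any pre/post start scripts resolve inside the deployment directory when deployment is set, and inside the checked-out external repo under dev_root_path otherwise. A minimal sketch, assuming a hypothetical pod entry and deployment path (key names and path construction mirror the hunk above):

import os
from pathlib import Path

# Hypothetical new-format pod entry as parsed from a stack file; key names follow the code above.
pod = {
    "name": "act-runner",
    "repository": "gitea.com/gitea/act_runner",
    "path": "main",
    "pre_start_command": "pre_start.sh",
    "post_start_command": "post_start.sh",
}
pod_name = pod["name"]

# Deployment case: everything lives inside the deployment directory.
compose_dir = Path("/srv/package-registry-deployment/compose")   # hypothetical deployment path
compose_file_name = os.path.join(compose_dir, "docker-compose.yml")
script_dir = compose_dir.parent.joinpath("pods", pod_name, "scripts")
pre_start = os.path.join(script_dir, pod["pre_start_command"])
# -> /srv/package-registry-deployment/pods/act-runner/scripts/pre_start.sh

# Non-deployment case: the compose file comes from the external repo clone under dev_root_path.
dev_root_path = os.path.expanduser("~/cerc")
pod_root_dir = os.path.join(dev_root_path, pod["repository"].split("/")[-1], pod["path"])
compose_file_name = os.path.join(pod_root_dir, "docker-compose.yml")
# -> ~/cerc/act_runner/main/docker-compose.yml
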
@@ -16,14 +16,14 @@
 import os
 from typing import List
 from app.deploy_types import DeployCommandContext, VolumeMapping
-from app.util import get_parsed_stack_config, get_yaml, get_compose_file_dir
+from app.util import get_parsed_stack_config, get_yaml, get_compose_file_dir, get_pod_list
 
 
 def _container_image_from_service(stack: str, service: str):
     # Parse the compose files looking for the image name of the specified service
     image_name = None
     parsed_stack = get_parsed_stack_config(stack)
-    pods = parsed_stack["pods"]
+    pods = get_pod_list(parsed_stack)
     yaml = get_yaml()
     for pod in pods:
         pod_file_path = os.path.join(get_compose_file_dir(), f"docker-compose-{pod}.yml")

@@ -17,12 +17,13 @@ import click
 from importlib import util
 import os
 from pathlib import Path
+from typing import List
 import random
-from shutil import copyfile, copytree
+from shutil import copy, copyfile, copytree
 import sys
-from app.util import get_stack_file_path, get_parsed_deployment_spec, get_parsed_stack_config, global_options, get_yaml
-from app.util import get_compose_file_dir
-from app.deploy_types import DeploymentContext, LaconicStackSetupCommand
+from app.util import (get_stack_file_path, get_parsed_deployment_spec, get_parsed_stack_config, global_options, get_yaml,
+                      get_pod_list, get_pod_file_path, pod_has_scripts, get_pod_script_paths, get_plugin_code_path)
+from app.deploy_types import DeploymentContext, DeployCommandContext, LaconicStackSetupCommand
 
 
 def _make_default_deployment_dir():

@@ -32,10 +33,10 @@ def _make_default_deployment_dir():
 def _get_ports(stack):
     ports = {}
     parsed_stack = get_parsed_stack_config(stack)
-    pods = parsed_stack["pods"]
+    pods = get_pod_list(parsed_stack)
     yaml = get_yaml()
     for pod in pods:
-        pod_file_path = os.path.join(get_compose_file_dir(), f"docker-compose-{pod}.yml")
+        pod_file_path = get_pod_file_path(parsed_stack, pod)
         parsed_pod_file = yaml.load(open(pod_file_path, "r"))
         if "services" in parsed_pod_file:
             for svc_name, svc in parsed_pod_file["services"].items():

@@ -49,10 +50,10 @@ def _get_named_volumes(stack):
     # Parse the compose files looking for named volumes
     named_volumes = []
     parsed_stack = get_parsed_stack_config(stack)
-    pods = parsed_stack["pods"]
+    pods = get_pod_list(parsed_stack)
     yaml = get_yaml()
     for pod in pods:
-        pod_file_path = os.path.join(get_compose_file_dir(), f"docker-compose-{pod}.yml")
+        pod_file_path = get_pod_file_path(parsed_stack, pod)
         parsed_pod_file = yaml.load(open(pod_file_path, "r"))
         if "volumes" in parsed_pod_file:
             volumes = parsed_pod_file["volumes"]

@@ -105,11 +106,16 @@ def _fixup_pod_file(pod, spec, compose_dir):
                 pod["services"][container_name]["ports"] = container_ports
 
 
+def _commands_plugin_path(ctx: DeployCommandContext):
+    plugin_path = get_plugin_code_path(ctx.stack)
+    return plugin_path.joinpath("deploy", "commands.py")
+
+
 def call_stack_deploy_init(deploy_command_context):
     # Link with the python file in the stack
     # Call a function in it
     # If no function found, return None
-    python_file_path = get_stack_file_path(deploy_command_context.stack).parent.joinpath("deploy", "commands.py")
+    python_file_path = _commands_plugin_path(deploy_command_context)
     if python_file_path.exists():
         spec = util.spec_from_file_location("commands", python_file_path)
         imported_stack = util.module_from_spec(spec)

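The effect of _commands_plugin_path is that deploy plugin code can now live in an external pod repository instead of only next to an internal stack file. A hedged sketch of the resulting lookup, with hypothetical repository and path values (the joins mirror get_plugin_code_path and _commands_plugin_path in this commit):

import os
from pathlib import Path

dev_root_path = os.path.expanduser("~/cerc")                          # default CERC_REPO_BASE_DIR
pod = {"repository": "git.vdb.to/cerc-io/hosting", "path": "gitea"}   # hypothetical external pod
pod_root_dir = os.path.join(dev_root_path, pod["repository"].split("/")[-1], pod["path"])
plugin_code_path = Path(os.path.join(pod_root_dir, "stack"))
commands_file = plugin_code_path.joinpath("deploy", "commands.py")
# -> ~/cerc/hosting/gitea/stack/deploy/commands.py
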
@@ -124,7 +130,8 @@ def call_stack_deploy_setup(deploy_command_context, parameters: LaconicStackSetu
     # Link with the python file in the stack
     # Call a function in it
     # If no function found, return None
-    python_file_path = get_stack_file_path(deploy_command_context.stack).parent.joinpath("deploy", "commands.py")
+    python_file_path = _commands_plugin_path(deploy_command_context)
+    print(f"Path: {python_file_path}")
     if python_file_path.exists():
         spec = util.spec_from_file_location("commands", python_file_path)
         imported_stack = util.module_from_spec(spec)

@@ -139,7 +146,7 @@ def call_stack_deploy_create(deployment_context, extra_args):
     # Link with the python file in the stack
     # Call a function in it
     # If no function found, return None
-    python_file_path = get_stack_file_path(deployment_context.command_context.stack).parent.joinpath("deploy", "commands.py")
+    python_file_path = _commands_plugin_path(deployment_context.command_context)
     if python_file_path.exists():
         spec = util.spec_from_file_location("commands", python_file_path)
         imported_stack = util.module_from_spec(spec)

@@ -263,14 +270,21 @@ def init(ctx, config, output, map_ports_to_host):
 
 def _write_config_file(spec_file: Path, config_env_file: Path):
     spec_content = get_parsed_deployment_spec(spec_file)
-    if spec_content["config"]:
-        config_vars = spec_content["config"]
-        if config_vars:
-            with open(config_env_file, "w") as output_file:
-                for variable_name, variable_value in config_vars.items():
-                    output_file.write(f"{variable_name}={variable_value}\n")
+    # Note: we want to write an empty file even if we have no config variables
+    with open(config_env_file, "w") as output_file:
+        if "config" in spec_content and spec_content["config"]:
+            config_vars = spec_content["config"]
+            if config_vars:
+                for variable_name, variable_value in config_vars.items():
+                    output_file.write(f"{variable_name}={variable_value}\n")
 
 
+def _copy_files_to_directory(file_paths: List[Path], directory: Path):
+    for path in file_paths:
+        # Using copy to preserve the execute bit
+        copy(path, os.path.join(directory, os.path.basename(path)))
+
+
 @click.command()
 @click.option("--spec-file", required=True, help="Spec file to use to create this deployment")
 @click.option("--deployment-dir", help="Create deployment files in this directory")

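The point of the rewritten _write_config_file is that config.env is always created, even when the spec carries no config variables. A standalone sketch of that behaviour, using temporary paths and hypothetical spec content:

import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    config_env_file = Path(tmp, "config.env")
    spec_content = {}                           # hypothetical spec with no "config" section
    with open(config_env_file, "w") as output_file:
        if "config" in spec_content and spec_content["config"]:
            for variable_name, variable_value in spec_content["config"].items():
                output_file.write(f"{variable_name}={variable_value}\n")
    assert config_env_file.exists()             # the file is present...
    assert config_env_file.read_text() == ""    # ...but empty
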
@@ -298,15 +312,19 @@ def create(ctx, spec_file, deployment_dir, network_dir, initial_peers):
     # Copy any config varibles from the spec file into an env file suitable for compose
     _write_config_file(spec_file, os.path.join(deployment_dir, "config.env"))
     # Copy the pod files into the deployment dir, fixing up content
-    pods = parsed_stack['pods']
+    pods = get_pod_list(parsed_stack)
     destination_compose_dir = os.path.join(deployment_dir, "compose")
     os.mkdir(destination_compose_dir)
+    destination_pods_dir = os.path.join(deployment_dir, "pods")
+    os.mkdir(destination_pods_dir)
     data_dir = Path(__file__).absolute().parent.joinpath("data")
     yaml = get_yaml()
     for pod in pods:
-        pod_file_path = os.path.join(get_compose_file_dir(), f"docker-compose-{pod}.yml")
+        pod_file_path = get_pod_file_path(parsed_stack, pod)
         parsed_pod_file = yaml.load(open(pod_file_path, "r"))
         extra_config_dirs = _find_extra_config_dirs(parsed_pod_file, pod)
+        destination_pod_dir = os.path.join(destination_pods_dir, pod)
+        os.mkdir(destination_pod_dir)
         if global_options(ctx).debug:
             print(f"extra config dirs: {extra_config_dirs}")
         _fixup_pod_file(parsed_pod_file, parsed_spec, destination_compose_dir)

@@ -322,6 +340,12 @@ def create(ctx, spec_file, deployment_dir, network_dir, initial_peers):
             # If the same config dir appears in multiple pods, it may already have been copied
             if not os.path.exists(destination_config_dir):
                 copytree(source_config_dir, destination_config_dir)
+        # Copy the script files for the pod, if any
+        if pod_has_scripts(parsed_stack, pod):
+            destination_script_dir = os.path.join(destination_pod_dir, "scripts")
+            os.mkdir(destination_script_dir)
+            script_paths = get_pod_script_paths(parsed_stack, pod)
+            _copy_files_to_directory(script_paths, destination_script_dir)
     # Delegate to the stack's Python code
     # The deploy create command doesn't require a --stack argument so we need to insert the
     # stack member here.

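Taken together, the create() changes give each deployment a per-pod directory next to the compose files. A hedged sketch of the resulting skeleton for a single pod (the pod name is hypothetical; the directory names follow the code above):

import os
import tempfile

with tempfile.TemporaryDirectory() as deployment_dir:
    open(os.path.join(deployment_dir, "config.env"), "w").close()   # from _write_config_file(), possibly empty
    os.mkdir(os.path.join(deployment_dir, "compose"))               # fixed-up docker-compose files
    pods_dir = os.path.join(deployment_dir, "pods")
    os.mkdir(pods_dir)
    pod_dir = os.path.join(pods_dir, "act-runner")                  # one directory per pod
    os.mkdir(pod_dir)
    os.mkdir(os.path.join(pod_dir, "scripts"))                      # only when pod_has_scripts() is true
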
app/util.py (73 changed lines)

@@ -13,6 +13,7 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http:#www.gnu.org/licenses/>.
 
+from decouple import config
 import os.path
 import sys
 import ruamel.yaml

@@ -37,6 +38,16 @@ def get_stack_file_path(stack):
     return stack_file_path
 
 
+def get_dev_root_path(ctx):
+    if ctx and ctx.local_stack:
+        # TODO: This code probably doesn't work
+        dev_root_path = os.getcwd()[0:os.getcwd().rindex("stack-orchestrator")]
+        print(f'Local stack dev_root_path (CERC_REPO_BASE_DIR) overridden to: {dev_root_path}')
+    else:
+        dev_root_path = os.path.expanduser(config("CERC_REPO_BASE_DIR", default="~/cerc"))
+    return dev_root_path
+
+
 # Caller can pass either the name of a stack, or a path to a stack file
 def get_parsed_stack_config(stack):
     stack_file_path = stack if isinstance(stack, os.PathLike) else get_stack_file_path(stack)

@@ -56,6 +67,68 @@ def get_parsed_stack_config(stack):
         sys.exit(1)
 
 
+def get_pod_list(parsed_stack):
+    # Handle both old and new format
+    pods = parsed_stack["pods"]
+    if type(pods[0]) is str:
+        result = pods
+    else:
+        result = []
+        for pod in pods:
+            result.append(pod["name"])
+    return result
+
+
+def get_plugin_code_path(stack):
+    parsed_stack = get_parsed_stack_config(stack)
+    pods = parsed_stack["pods"]
+    # TODO: Hack
+    pod = pods[0]
+    if type(pod) is str:
+        result = get_stack_file_path(stack).parent
+    else:
+        pod_root_dir = os.path.join(get_dev_root_path(None), pod["repository"].split("/")[-1], pod["path"])
+        result = Path(os.path.join(pod_root_dir, "stack"))
+    return result
+
+
+def get_pod_file_path(parsed_stack, pod_name: str):
+    pods = parsed_stack["pods"]
+    if type(pods[0]) is str:
+        result = os.path.join(get_compose_file_dir(), f"docker-compose-{pod_name}.yml")
+    else:
+        for pod in pods:
+            if pod["name"] == pod_name:
+                pod_root_dir = os.path.join(get_dev_root_path(None), pod["repository"].split("/")[-1], pod["path"])
+                result = os.path.join(pod_root_dir, "docker-compose.yml")
+    return result
+
+
+def get_pod_script_paths(parsed_stack, pod_name: str):
+    pods = parsed_stack["pods"]
+    result = []
+    if not type(pods[0]) is str:
+        for pod in pods:
+            if pod["name"] == pod_name:
+                pod_root_dir = os.path.join(get_dev_root_path(None), pod["repository"].split("/")[-1], pod["path"])
+                if "pre_start_command" in pod:
+                    result.append(os.path.join(pod_root_dir, pod["pre_start_command"]))
+                if "post_start_command" in pod:
+                    result.append(os.path.join(pod_root_dir, pod["post_start_command"]))
+    return result
+
+
+def pod_has_scripts(parsed_stack, pod_name: str):
+    pods = parsed_stack["pods"]
+    if type(pods[0]) is str:
+        result = False
+    else:
+        for pod in pods:
+            if pod["name"] == pod_name:
+                result = "pre_start_command" in pod or "post_start_command" in pod
+    return result
+
+
 def get_compose_file_dir():
     # TODO: refactor to use common code with deploy command
     # See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure
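A short illustration of the two pod-list formats these helpers accept. Both example stacks are hypothetical, and it assumes the app.util module from this commit is importable:

from app.util import get_pod_list, get_pod_script_paths, pod_has_scripts

old_style_stack = {"pods": ["act-runner"]}
new_style_stack = {
    "pods": [{
        "name": "act-runner",
        "repository": "gitea.com/gitea/act_runner",
        "path": "main",
        "pre_start_command": "pre_start.sh",
        "post_start_command": "post_start.sh",
    }]
}

assert get_pod_list(old_style_stack) == ["act-runner"]
assert get_pod_list(new_style_stack) == ["act-runner"]
assert not pod_has_scripts(old_style_stack, "act-runner")
assert pod_has_scripts(new_style_stack, "act-runner")
# Script paths resolve into the external repo checkout under CERC_REPO_BASE_DIR, e.g.
# <dev_root>/act_runner/main/pre_start.sh and <dev_root>/act_runner/main/post_start.sh:
print(get_pod_script_paths(new_style_stack, "act-runner"))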