Add support for jobs in stacks

This commit is contained in:
Prathamesh Musale 2025-12-01 20:00:45 +05:30
parent 7acabb0743
commit 04463d22ef
3 changed files with 118 additions and 5 deletions

View File

@@ -27,7 +27,7 @@ from stack_orchestrator.opts import opts
from stack_orchestrator.util import (get_stack_path, get_parsed_deployment_spec, get_parsed_stack_config,
global_options, get_yaml, get_pod_list, get_pod_file_path, pod_has_scripts,
get_pod_script_paths, get_plugin_code_paths, error_exit, env_var_map_from_file,
resolve_config_dir)
resolve_config_dir, get_job_list, get_job_file_path)
from stack_orchestrator.deploy.spec import Spec
from stack_orchestrator.deploy.deploy_types import LaconicStackSetupCommand
from stack_orchestrator.deploy.deployer_factory import getDeployerConfigGenerator
@@ -540,6 +540,21 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, helm
if os.path.exists(destination_config_dir) and not os.listdir(destination_config_dir):
copytree(source_config_dir, destination_config_dir, dirs_exist_ok=True)
# Copy the job files into the deployment dir (for Docker deployments)
jobs = get_job_list(parsed_stack)
if jobs and not parsed_spec.is_kubernetes_deployment():
destination_compose_jobs_dir = deployment_dir_path.joinpath("compose-jobs")
os.mkdir(destination_compose_jobs_dir)
for job in jobs:
job_file_path = get_job_file_path(stack_name, parsed_stack, job)
if job_file_path and job_file_path.exists():
parsed_job_file = yaml.load(open(job_file_path, "r"))
_fixup_pod_file(parsed_job_file, parsed_spec, destination_compose_dir)
with open(destination_compose_jobs_dir.joinpath("docker-compose-%s.yml" % job), "w") as output_file:
yaml.dump(parsed_job_file, output_file)
if opts.o.debug:
print(f"Copied job compose file: {job}")
# Delegate to the stack's Python code
# The deploy create command doesn't require a --stack argument so we need to insert the
# stack member here.
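For context, a minimal sketch (hypothetical stack, job, and directory names, not taken from this stack) of the jobs shape this step consumes and where each job compose file lands for a Docker deployment:

from pathlib import Path

# Hypothetical parsed stack.yml: jobs listed alongside pods
parsed_stack = {
    "name": "example-stack",
    "pods": ["example-service"],
    "jobs": ["db-migrate"],
}

# Hypothetical deployment directory
deployment_dir_path = Path("/srv/deployments/example")

# Mirrors the copy step above: one compose file per job lands in compose-jobs/
for job in parsed_stack.get("jobs", []):
    target = deployment_dir_path / "compose-jobs" / f"docker-compose-{job}.yml"
    print(f"Job '{job}' would be copied to: {target}")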

View File

@@ -23,6 +23,8 @@ from stack_orchestrator.util import (
get_parsed_stack_config,
get_pod_list,
get_pod_file_path,
get_job_list,
get_job_file_path,
error_exit
)
from stack_orchestrator.deploy.k8s.helm.kompose_wrapper import (
@@ -33,12 +35,46 @@ from stack_orchestrator.deploy.k8s.helm.kompose_wrapper import (
from stack_orchestrator.util import get_yaml
def _post_process_chart(chart_dir: Path, chart_name: str) -> None:
def _wrap_job_templates_with_conditionals(chart_dir: Path, jobs: list) -> None:
"""
Wrap job templates with conditional checks so they are not created by default.
Jobs will only be created when explicitly enabled via --set jobs.<name>.enabled=true
"""
templates_dir = chart_dir / "templates"
if not templates_dir.exists():
return
for job_name in jobs:
# Find job template file (kompose generates <service-name>-job.yaml)
job_template_file = templates_dir / f"{job_name}-job.yaml"
if not job_template_file.exists():
if opts.o.debug:
print(f"Warning: Job template not found: {job_template_file}")
continue
# Read the template content
content = job_template_file.read_text()
# Wrap with a conditional that defaults to false; `dig` (Helm 3.2+) tolerates a
# missing `jobs` section in values, avoiding a nil pointer error on default installs
wrapped_content = f"""{{{{- if dig "jobs" "{job_name}" "enabled" false .Values }}}}
{content}{{{{- end }}}}
"""
# Write back
job_template_file.write_text(wrapped_content)
if opts.o.debug:
print(f"Wrapped job template with conditional: {job_template_file.name}")
def _post_process_chart(chart_dir: Path, chart_name: str, jobs: list) -> None:
"""
Post-process Kompose-generated chart to fix common issues.
Fixes:
1. Chart.yaml name, description and keywords
2. Add conditional wrappers to job templates (default: disabled)
TODO:
- Add defaultMode: 0755 to ConfigMap volumes containing scripts (.sh files)
@@ -63,6 +99,10 @@ def _post_process_chart(chart_dir: Path, chart_name: str) -> None:
with open(chart_yaml_path, "w") as f:
yaml.dump(chart_yaml, f)
# Process job templates: wrap with conditionals (default disabled)
if jobs:
_wrap_job_templates_with_conditionals(chart_dir, jobs)
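For reference, the values structure the wrapped conditional looks up is the one produced by the --set flag mentioned in the docstring above; a sketch of the equivalent overlay for a hypothetical db-migrate job:

# Equivalent of `helm install ... --set jobs.db-migrate.enabled=true`
values_overlay = {
    "jobs": {
        "db-migrate": {
            "enabled": True,
        },
    },
}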
def generate_helm_chart(stack_path: str, spec_file: str, deployment_dir: str = None) -> None:
"""
@@ -132,16 +172,20 @@ def generate_helm_chart(stack_path: str, spec_file: str, deployment_dir: str = N
print(f"Copied spec file: {spec_path}")
print(f"Copied stack file: {stack_file_path}")
# 4. Get compose files from stack
# 4. Get compose files from stack (pods + jobs)
pods = get_pod_list(parsed_stack)
if not pods:
error_exit(f"No pods found in stack: {stack_path}")
jobs = get_job_list(parsed_stack)
# Get clean stack name from stack.yml
chart_name = stack_name.replace("_", "-").replace(" ", "-")
if opts.o.debug:
print(f"Found {len(pods)} pod(s) in stack: {pods}")
if jobs:
print(f"Found {len(jobs)} job(s) in stack: {jobs}")
compose_files = []
for pod in pods:
@@ -152,6 +196,17 @@ def generate_helm_chart(stack_path: str, spec_file: str, deployment_dir: str = N
if opts.o.debug:
print(f"Found compose file: {pod_file.name}")
# Add job compose files
job_files = []
for job in jobs:
job_file = get_job_file_path(stack_path, parsed_stack, job)
if job_file is None or not job_file.exists():
error_exit(f"Job file not found: {job_file}")
compose_files.append(job_file)
job_files.append(job_file)
if opts.o.debug:
print(f"Found job compose file: {job_file.name}")
try:
version = get_kompose_version()
print(f"Using kompose version: {version}")
@@ -175,12 +230,12 @@ def generate_helm_chart(stack_path: str, spec_file: str, deployment_dir: str = N
error_exit(f"Helm chart generation failed: {e}")
# 6. Post-process generated chart
_post_process_chart(chart_dir, chart_name)
_post_process_chart(chart_dir, chart_name, jobs)
# 7. Generate README.md with basic installation instructions
readme_content = f"""# {chart_name} Helm Chart
Generated by laconic-so from stack: `{stack_path}
Generated by laconic-so from stack: `{stack_path}`
## Prerequisites

View File

@@ -78,6 +78,22 @@ def get_pod_list(parsed_stack):
return result
def get_job_list(parsed_stack):
# Return list of jobs from stack config, or empty list if no jobs defined
if "jobs" not in parsed_stack:
return []
jobs = parsed_stack["jobs"]
if not jobs:
return []
if type(jobs[0]) is str:
result = jobs
else:
result = []
for job in jobs:
result.append(job["name"])
return result
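Both the plain-string and the name-keyed forms of the jobs list are handled, mirroring get_pod_list. A few quick checks with hypothetical job names, assuming get_job_list above is in scope:

# Jobs given as plain strings
assert get_job_list({"jobs": ["db-migrate", "seed-data"]}) == ["db-migrate", "seed-data"]

# Jobs given as mappings with a "name" key
assert get_job_list({"jobs": [{"name": "db-migrate"}]}) == ["db-migrate"]

# Missing or empty jobs section yields an empty list
assert get_job_list({}) == []
assert get_job_list({"jobs": []}) == []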
def get_plugin_code_paths(stack) -> List[Path]:
parsed_stack = get_parsed_stack_config(stack)
pods = parsed_stack["pods"]
@@ -119,6 +135,21 @@ def resolve_compose_file(stack, pod_name: str):
return compose_base.joinpath(f"docker-compose-{pod_name}.yml")
# Find a job compose file in compose-jobs directory
def resolve_job_compose_file(stack, job_name: str):
if stack_is_external(stack):
# First try looking in the external stack for the job compose file
compose_jobs_base = Path(stack).parent.parent.joinpath("compose-jobs")
proposed_file = compose_jobs_base.joinpath(f"docker-compose-{job_name}.yml")
if proposed_file.exists():
return proposed_file
# If we don't find it fall through to the internal case
# TODO: Add internal compose-jobs directory support if needed
# For now, jobs are expected to be in external stacks only
compose_jobs_base = Path(stack).parent.parent.joinpath("compose-jobs")
return compose_jobs_base.joinpath(f"docker-compose-{job_name}.yml")
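The path arithmetic assumes the stack argument points at a stack directory two levels below the repo's compose-jobs directory. A sketch with a hypothetical external stack path, assuming resolve_job_compose_file above is in scope:

from pathlib import Path

# Hypothetical external stack directory (illustrative path)
stack = "/home/user/cerc/example-repo/stack-orchestrator/stacks/example-stack"

# Path(stack).parent.parent -> .../stack-orchestrator, so the job compose file
# resolves to .../stack-orchestrator/compose-jobs/docker-compose-db-migrate.yml
job_file = resolve_job_compose_file(stack, "db-migrate")
print(job_file)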
def get_pod_file_path(stack, parsed_stack, pod_name: str):
pods = parsed_stack["pods"]
if type(pods[0]) is str:
@@ -131,6 +162,18 @@ def get_pod_file_path(stack, parsed_stack, pod_name: str):
return result
def get_job_file_path(stack, parsed_stack, job_name: str):
if "jobs" not in parsed_stack or not parsed_stack["jobs"]:
return None
jobs = parsed_stack["jobs"]
if type(jobs[0]) is str:
result = resolve_job_compose_file(stack, job_name)
else:
# TODO: Support complex job definitions if needed
result = resolve_job_compose_file(stack, job_name)
return result
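Putting the helpers together, a short sketch (hypothetical stack path and job name) of how a caller resolves each job's compose file, assuming the functions above are importable from stack_orchestrator.util:

# Hypothetical external stack and parsed config
stack = "/home/user/cerc/example-repo/stack-orchestrator/stacks/example-stack"
parsed_stack = {"pods": ["example-service"], "jobs": ["db-migrate"]}

for job in get_job_list(parsed_stack):
    job_file = get_job_file_path(stack, parsed_stack, job)
    # get_job_file_path returns None when the stack defines no jobs,
    # otherwise it delegates to resolve_job_compose_file()
    if job_file is not None:
        print(f"{job}: {job_file}")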
def get_pod_script_paths(parsed_stack, pod_name: str):
pods = parsed_stack["pods"]
result = []