Compare commits


5 Commits

10 changed files with 535 additions and 37 deletions

View File

@ -0,0 +1,113 @@
# Helm Chart Generation
Generate Kubernetes Helm charts from stack compose files using Kompose.
## Prerequisites
Install Kompose:
```bash
# Linux
curl -L https://github.com/kubernetes/kompose/releases/download/v1.34.0/kompose-linux-amd64 -o kompose
chmod +x kompose
sudo mv kompose /usr/local/bin/
# macOS
brew install kompose
# Verify
kompose version
```
## Usage
### 1. Create spec file
```bash
laconic-so --stack <stack-name> deploy --deploy-to k8s init \
--kube-config ~/.kube/config \
--output spec.yml
```
### 2. Generate Helm chart
```bash
laconic-so --stack <stack-name> deploy create \
--spec-file spec.yml \
--deployment-dir my-deployment \
--helm-chart
```
### 3. Deploy to Kubernetes
```bash
helm install my-release my-deployment/chart
kubectl get pods -n zenith
```
## Output Structure
```bash
my-deployment/
├── spec.yml # Reference
├── stack.yml # Reference
└── chart/ # Helm chart
├── Chart.yaml
├── README.md
└── templates/
└── *.yaml
```
## Example
```bash
# Generate chart for stage1-zenithd
laconic-so --stack stage1-zenithd deploy --deploy-to k8s init \
--kube-config ~/.kube/config \
--output stage1-spec.yml
laconic-so --stack stage1-zenithd deploy create \
--spec-file stage1-spec.yml \
--deployment-dir stage1-deployment \
--helm-chart
# Deploy
helm install stage1-zenithd stage1-deployment/chart
```
## Production Deployment (TODO)
### Local Development
```bash
# Access services using port-forward
kubectl port-forward service/zenithd 26657:26657
kubectl port-forward service/nginx-api-proxy 1317:80
kubectl port-forward service/cosmos-explorer 4173:4173
```
### Production Access Options
- Option 1: Ingress + cert-manager (Recommended)
- Install ingress-nginx + cert-manager
- Point DNS to cluster LoadBalancer IP
- Auto-provisions Let's Encrypt TLS certs
- Access: `https://api.zenith.example.com`
- Option 2: Cloud LoadBalancer
- Use cloud provider's LoadBalancer service type
- Point DNS to assigned external IP
- Manual TLS cert management
- Option 3: Bare Metal (MetalLB + Ingress)
- MetalLB provides LoadBalancer IPs from local network
- Same Ingress setup as cloud
- Option 4: NodePort + External Proxy
  - Expose services on ports in the 30000-32767 range
- External nginx/Caddy proxies 80/443 → NodePort
- Manual cert management
### Changes Needed
- Add Ingress template to charts
- Add TLS configuration to values.yaml
- Document cert-manager setup
- Add production deployment guide
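
A possible starting point for the first two items above is to have the chart generator drop a templated Ingress and matching value defaults into the generated chart. This is only a sketch: the `add_ingress_template` helper, the `ingress.*` values keys, and the example host/service names are assumptions, not part of the current generator.

```python
from pathlib import Path

# Hypothetical helper (not part of the generator yet): writes a minimal
# Ingress template and matching value defaults into a generated chart.
INGRESS_TEMPLATE = """{{- if .Values.ingress.enabled }}
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: {{ .Chart.Name }}
  annotations:
    cert-manager.io/cluster-issuer: {{ .Values.ingress.clusterIssuer }}
spec:
  ingressClassName: nginx
  tls:
    - hosts:
        - {{ .Values.ingress.host }}
      secretName: {{ .Chart.Name }}-tls
  rules:
    - host: {{ .Values.ingress.host }}
      http:
        paths:
          - path: /
            pathType: Prefix
            backend:
              service:
                name: {{ .Values.ingress.serviceName }}
                port:
                  number: {{ .Values.ingress.servicePort }}
{{- end }}
"""


def add_ingress_template(chart_dir: Path) -> None:
    """Write templates/ingress.yaml and append default ingress values."""
    templates_dir = chart_dir / "templates"
    templates_dir.mkdir(parents=True, exist_ok=True)
    (templates_dir / "ingress.yaml").write_text(INGRESS_TEMPLATE)
    # Kompose does not always emit a values.yaml; create or extend it
    defaults = (
        "ingress:\n"
        "  enabled: false\n"
        "  clusterIssuer: letsencrypt-prod\n"
        "  host: api.zenith.example.com\n"
        "  serviceName: nginx-api-proxy\n"
        "  servicePort: 80\n"
    )
    with open(chart_dir / "values.yaml", "a") as f:
        f.write(defaults)
```

With something along these lines wired into the generator, `helm install <release> <chart> --set ingress.enabled=true` would expose the stack's API service behind a cert-manager-issued certificate.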

View File

@ -14,6 +14,7 @@
 # along with this program. If not, see <http:#www.gnu.org/licenses/>.
 from stack_orchestrator.deploy.deployment_context import DeploymentContext
+from ruamel.yaml import YAML

 def create(context: DeploymentContext, extra_args):
@ -22,12 +23,17 @@ def create(context: DeploymentContext, extra_args):
     # deterministic-deployment-proxy contract, which itself is a prereq for Optimism contract deployment
     fixturenet_eth_compose_file = context.deployment_dir.joinpath('compose', 'docker-compose-fixturenet-eth.yml')
+    with open(fixturenet_eth_compose_file, 'r') as yaml_file:
+        yaml = YAML()
+        yaml_data = yaml.load(yaml_file)
     new_script = '../config/fixturenet-optimism/run-geth.sh:/opt/testnet/run.sh'
-    def add_geth_volume(yaml_data):
-        if new_script not in yaml_data['services']['fixturenet-eth-geth-1']['volumes']:
-            yaml_data['services']['fixturenet-eth-geth-1']['volumes'].append(new_script)
-    context.modify_yaml(fixturenet_eth_compose_file, add_geth_volume)
+    if new_script not in yaml_data['services']['fixturenet-eth-geth-1']['volumes']:
+        yaml_data['services']['fixturenet-eth-geth-1']['volumes'].append(new_script)
+    with open(fixturenet_eth_compose_file, 'w') as yaml_file:
+        yaml = YAML()
+        yaml.dump(yaml_data, yaml_file)
     return None

View File

@ -2,6 +2,7 @@ version: "1.0"
 name: test
 description: "A test stack"
 repos:
+  - git.vdb.to/cerc-io/laconicd
   - git.vdb.to/cerc-io/test-project@test-branch
 containers:
   - cerc/test-container

View File

@ -45,14 +45,11 @@ class DeploymentContext:
     def get_compose_dir(self):
         return self.deployment_dir.joinpath(constants.compose_dir_name)

-    def get_compose_file(self, name: str):
-        return self.get_compose_dir() / f"docker-compose-{name}.yml"
-
     def get_cluster_id(self):
         return self.id

-    def init(self, dir: Path):
-        self.deployment_dir = dir.absolute()
+    def init(self, dir):
+        self.deployment_dir = dir
         self.spec = Spec()
         self.spec.init_from_file(self.get_spec_file())
         self.stack = Stack(self.spec.obj["stack"])
@ -69,19 +66,3 @@ class DeploymentContext:
         unique_cluster_descriptor = f"{path},{self.get_stack_file()},None,None"
         hash = hashlib.md5(unique_cluster_descriptor.encode()).hexdigest()[:16]
         self.id = f"{constants.cluster_name_prefix}{hash}"
-
-    def modify_yaml(self, file_path: Path, modifier_func):
-        """
-        Load a YAML from the deployment, apply a modification function, and write it back.
-        """
-        if not file_path.absolute().is_relative_to(self.deployment_dir):
-            raise ValueError(f"File is not inside deployment directory: {file_path}")
-        yaml = get_yaml()
-        with open(file_path, 'r') as f:
-            yaml_data = yaml.load(f)
-        modifier_func(yaml_data)
-        with open(file_path, 'w') as f:
-            yaml.dump(yaml_data, f)

View File

@ -443,20 +443,31 @@ def _check_volume_definitions(spec):
 @click.command()
 @click.option("--spec-file", required=True, help="Spec file to use to create this deployment")
 @click.option("--deployment-dir", help="Create deployment files in this directory")
-@click.argument('extra_args', nargs=-1, type=click.UNPROCESSED)
+@click.option("--helm-chart", is_flag=True, default=False, help="Generate Helm chart instead of deploying (k8s only)")
+# TODO: Hack
+@click.option("--network-dir", help="Network configuration supplied in this directory")
+@click.option("--initial-peers", help="Initial set of persistent peers")
 @click.pass_context
-def create(ctx, spec_file, deployment_dir, extra_args):
+def create(ctx, spec_file, deployment_dir, helm_chart, network_dir, initial_peers):
     deployment_command_context = ctx.obj
-    return create_operation(deployment_command_context, spec_file, deployment_dir, extra_args)
+    return create_operation(deployment_command_context, spec_file, deployment_dir, helm_chart, network_dir, initial_peers)


 # The init command's implementation is in a separate function so that we can
 # call it from other commands, bypassing the click decoration stuff
-def create_operation(deployment_command_context, spec_file, deployment_dir, extra_args):
+def create_operation(deployment_command_context, spec_file, deployment_dir, helm_chart, network_dir, initial_peers):
     parsed_spec = Spec(os.path.abspath(spec_file), get_parsed_deployment_spec(spec_file))
     _check_volume_definitions(parsed_spec)
     stack_name = parsed_spec["stack"]
     deployment_type = parsed_spec[constants.deploy_to_key]
+
+    # Branch to Helm chart generation flow early if --helm-chart flag is set
+    if deployment_type == "k8s" and helm_chart:
+        from stack_orchestrator.deploy.k8s.helm.chart_generator import generate_helm_chart
+        generate_helm_chart(stack_name, spec_file, deployment_dir)
+        return  # Exit early, completely separate from existing k8s deployment flow
+
+    # Existing deployment flow continues unchanged
     stack_file = get_stack_path(stack_name).joinpath(constants.stack_file_name)
     parsed_stack = get_parsed_stack_config(stack_name)
     if opts.o.debug:
@ -539,7 +550,7 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, extr
     deployer_config_generator = getDeployerConfigGenerator(deployment_type, deployment_context)
     # TODO: make deployment_dir_path a Path above
     deployer_config_generator.generate(deployment_dir_path)
-    call_stack_deploy_create(deployment_context, extra_args)
+    call_stack_deploy_create(deployment_context, [network_dir, initial_peers, deployment_command_context])
     # TODO: this code should be in the stack .py files but

View File

@ -0,0 +1,14 @@
# Copyright © 2025 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http:#www.gnu.org/licenses/>.

View File

@ -0,0 +1,266 @@
# Copyright © 2025 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http:#www.gnu.org/licenses/>.
import shutil
from pathlib import Path
from stack_orchestrator import constants
from stack_orchestrator.opts import opts
from stack_orchestrator.util import (
get_stack_path,
get_parsed_stack_config,
get_pod_list,
get_pod_file_path,
error_exit
)
from stack_orchestrator.deploy.k8s.helm.kompose_wrapper import (
check_kompose_available,
get_kompose_version,
convert_to_helm_chart
)
from stack_orchestrator.util import get_yaml
def _post_process_chart(chart_dir: Path, chart_name: str) -> None:
"""
Post-process Kompose-generated chart to fix common issues.
Fixes:
1. Chart.yaml name, description and keywords
TODO:
- Add defaultMode: 0755 to ConfigMap volumes containing scripts (.sh files)
"""
yaml = get_yaml()
# Fix Chart.yaml
chart_yaml_path = chart_dir / "Chart.yaml"
if chart_yaml_path.exists():
chart_yaml = yaml.load(open(chart_yaml_path, "r"))
# Fix name
chart_yaml["name"] = chart_name
# Fix description
chart_yaml["description"] = f"Generated Helm chart for {chart_name} stack"
# Fix keywords
if "keywords" in chart_yaml and isinstance(chart_yaml["keywords"], list):
chart_yaml["keywords"] = [chart_name]
with open(chart_yaml_path, "w") as f:
yaml.dump(chart_yaml, f)
def generate_helm_chart(stack_path: str, spec_file: str, deployment_dir: str = None) -> None:
"""
Generate a self-sufficient Helm chart from stack compose files using Kompose.
Args:
stack_path: Path to the stack directory
spec_file: Path to the deployment spec file
deployment_dir: Optional directory for deployment output
Output structure:
deployment-dir/
spec.yml # Reference
stack.yml # Reference
chart/ # Self-sufficient Helm chart
Chart.yaml
README.md
templates/
*.yaml
TODO: Enhancements:
- Parse generated templates and extract values to values.yaml
- Replace hardcoded image tags with {{ .Values.image.tag }}
- Replace hardcoded PVC sizes with {{ .Values.persistence.size }}
- Convert Deployments to StatefulSets for stateful services (zenithd, postgres)
- Add _helpers.tpl with common label/selector functions
- Embed config files (scripts, templates) into ConfigMap templates
- Generate Secret templates for validator keys with placeholders
- Add init containers for genesis/config setup
- Enhance Chart.yaml with proper metadata (version, description, etc.)
"""
parsed_stack = get_parsed_stack_config(stack_path)
stack_name = parsed_stack.get("name", stack_path)
# 1. Check Kompose availability
if not check_kompose_available():
error_exit("kompose not found in PATH.\n")
# 2. Setup deployment directory
if deployment_dir:
deployment_dir_path = Path(deployment_dir)
else:
deployment_dir_path = Path(f"{stack_name}-deployment")
if deployment_dir_path.exists():
error_exit(f"Deployment directory already exists: {deployment_dir_path}")
if opts.o.debug:
print(f"Creating deployment directory: {deployment_dir_path}")
deployment_dir_path.mkdir(parents=True)
# 3. Copy spec and stack files to deployment directory (for reference)
spec_path = Path(spec_file).resolve()
if not spec_path.exists():
error_exit(f"Spec file not found: {spec_file}")
stack_file_path = get_stack_path(stack_path).joinpath(constants.stack_file_name)
if not stack_file_path.exists():
error_exit(f"Stack file not found: {stack_file_path}")
shutil.copy(spec_path, deployment_dir_path / constants.spec_file_name)
shutil.copy(stack_file_path, deployment_dir_path / constants.stack_file_name)
if opts.o.debug:
print(f"Copied spec file: {spec_path}")
print(f"Copied stack file: {stack_file_path}")
# 4. Get compose files from stack
pods = get_pod_list(parsed_stack)
if not pods:
error_exit(f"No pods found in stack: {stack_path}")
# Get clean stack name from stack.yml
chart_name = stack_name.replace("_", "-").replace(" ", "-")
if opts.o.debug:
print(f"Found {len(pods)} pod(s) in stack: {pods}")
compose_files = []
for pod in pods:
pod_file = get_pod_file_path(stack_path, parsed_stack, pod)
if not pod_file.exists():
error_exit(f"Pod file not found: {pod_file}")
compose_files.append(pod_file)
if opts.o.debug:
print(f"Found compose file: {pod_file.name}")
try:
version = get_kompose_version()
print(f"Using kompose version: {version}")
except Exception as e:
error_exit(f"Failed to get kompose version: {e}")
# 5. Create chart directory and invoke Kompose
chart_dir = deployment_dir_path / "chart"
print(f"Converting {len(compose_files)} compose file(s) to Helm chart using Kompose...")
try:
output = convert_to_helm_chart(
compose_files=compose_files,
output_dir=chart_dir,
chart_name=chart_name
)
if opts.o.debug:
print(f"Kompose output:\n{output}")
except Exception as e:
error_exit(f"Helm chart generation failed: {e}")
# 6. Post-process generated chart
_post_process_chart(chart_dir, chart_name)
# 7. Generate README.md with basic installation instructions
readme_content = f"""# {chart_name} Helm Chart
Generated by laconic-so from stack: `{stack_path}`
## Prerequisites
- Kubernetes cluster (v1.27+)
- Helm (v3.12+)
- kubectl configured to access your cluster
## Installation
```bash
# Install the chart
helm install {chart_name} {chart_dir}
# Check deployment status
kubectl get pods
```
## Upgrade
To apply changes made to the chart, run an upgrade:
```bash
helm upgrade {chart_name} {chart_dir}
```
## Uninstallation
```bash
helm uninstall {chart_name}
```
## Configuration
The chart was generated from Docker Compose files using Kompose.
### Customization
Edit the generated template files in `templates/` to customize:
- Image repositories and tags
- Resource limits (CPU, memory)
- Persistent volume sizes
- Replica counts
"""
readme_path = chart_dir / "README.md"
readme_path.write_text(readme_content)
if opts.o.debug:
print(f"Generated README: {readme_path}")
# 8. Success message
print(f"\n{'=' * 60}")
print("✓ Helm chart generated successfully!")
print(f"{'=' * 60}")
print("\nChart details:")
print(f" Name: {chart_name}")
print(f" Location: {chart_dir.absolute()}")
print(f" Stack: {stack_path}")
# Count generated files
template_files = list((chart_dir / "templates").glob("*.yaml")) if (chart_dir / "templates").exists() else []
print(f" Files: {len(template_files)} template(s) generated")
print("\nDeployment directory structure:")
print(f" {deployment_dir_path}/")
print(" ├── spec.yml (reference)")
print(" ├── stack.yml (reference)")
print(" └── chart/ (self-sufficient Helm chart)")
print("\nNext steps:")
print(" 1. Review the chart:")
print(f" cd {chart_dir}")
print(" cat Chart.yaml")
print("")
print(" 2. Review generated templates:")
print(" ls templates/")
print("")
print(" 3. Install to Kubernetes:")
print(f" helm install {chart_name} {chart_dir}")
print("")
print(" 4. Check deployment:")
print(" kubectl get pods")
print("")

View File

@ -0,0 +1,109 @@
# Copyright © 2025 Vulcanize
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http:#www.gnu.org/licenses/>.
import subprocess
import shutil
from pathlib import Path
from typing import List
def check_kompose_available() -> bool:
"""Check if kompose binary is available in PATH."""
return shutil.which("kompose") is not None
def get_kompose_version() -> str:
"""
Get the installed kompose version.
Returns:
Version string (e.g., "1.34.0")
Raises:
Exception if kompose is not available
"""
if not check_kompose_available():
raise Exception("kompose not found in PATH")
result = subprocess.run(
["kompose", "version"],
capture_output=True,
text=True,
timeout=10
)
if result.returncode != 0:
raise Exception(f"Failed to get kompose version: {result.stderr}")
# Parse version from output like "1.34.0 (HEAD)" or just "1.34.0"
version_line = result.stdout.strip()
version = version_line.split()[0] if version_line else "unknown"
return version
def convert_to_helm_chart(compose_files: List[Path], output_dir: Path, chart_name: str = None) -> str:
"""
Invoke kompose to convert Docker Compose files to a Helm chart.
Args:
compose_files: List of paths to docker-compose.yml files
output_dir: Directory where the Helm chart will be generated
chart_name: Optional name for the chart (defaults to directory name)
Returns:
stdout from kompose command
Raises:
Exception if kompose conversion fails
"""
if not check_kompose_available():
raise Exception(
"kompose not found in PATH. "
"Install from: https://kompose.io/installation/"
)
# Ensure output directory exists
output_dir.mkdir(parents=True, exist_ok=True)
# Build kompose command
cmd = ["kompose", "convert"]
# Add all compose files
for compose_file in compose_files:
if not compose_file.exists():
raise Exception(f"Compose file not found: {compose_file}")
cmd.extend(["-f", str(compose_file)])
# Add chart flag and output directory
cmd.extend(["--chart", "-o", str(output_dir)])
# Execute kompose
result = subprocess.run(
cmd,
capture_output=True,
text=True,
timeout=60
)
if result.returncode != 0:
raise Exception(
f"Kompose conversion failed:\n"
f"Command: {' '.join(cmd)}\n"
f"Error: {result.stderr}"
)
return result.stdout
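
For reference, a minimal usage sketch of the wrapper: the compose file path is a placeholder, and note that `chart_name` is accepted but not currently passed through to kompose, which names the chart after the output directory.

```python
from pathlib import Path

from stack_orchestrator.deploy.k8s.helm.kompose_wrapper import (
    check_kompose_available,
    get_kompose_version,
    convert_to_helm_chart,
)

if not check_kompose_available():
    raise SystemExit("Install kompose first: https://kompose.io/installation/")

print(f"Using kompose {get_kompose_version()}")
# Equivalent to: kompose convert -f <compose-file> --chart -o my-deployment/chart
output = convert_to_helm_chart(
    compose_files=[Path("compose/docker-compose-fixturenet-eth.yml")],
    output_dir=Path("my-deployment/chart"),
    chart_name="my-stack",
)
print(output)
```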

View File

@ -91,7 +91,9 @@ def create_deployment(ctx, deployment_dir, image, url, kube_config, image_regist
         deploy_command_context,
         spec_file_name,
         deployment_dir,
+        False,
         None,
+        None
     )
     # Fix up the container tag inside the deployment compose file
     _fixup_container_tag(deployment_dir, image)

View File

@ -14,13 +14,8 @@ delete_cluster_exit () {
 # Test basic stack-orchestrator deploy
 echo "Running stack-orchestrator deploy test"
-if [ "$1" == "from-path" ]; then
-    TEST_TARGET_SO="laconic-so"
-else
-    TEST_TARGET_SO=$( ls -t1 ./package/laconic-so* | head -1 )
-fi
+# Bit of a hack, test the most recent package
+TEST_TARGET_SO=$( ls -t1 ./package/laconic-so* | head -1 )
 # Set a non-default repo dir
 export CERC_REPO_BASE_DIR=~/stack-orchestrator-test/repo-base-dir
 echo "Testing this package: $TEST_TARGET_SO"