Compare commits

4 Commits

693259473e  Check the deployment dir exists when syncing  2025-10-19 21:14:49 +08:00
  All checks were successful:
    Lint Checks / Run linter (pull_request) Successful in 3m33s
    Deploy Test / Run deploy test suite (pull_request) Successful in 8m15s
    K8s Deploy Test / Run deploy test suite on kind/k8s (pull_request) Successful in 12m0s
    K8s Deployment Control Test / Run deployment control suite on kind/k8s (pull_request) Successful in 13m21s
    Webapp Test / Run webapp test suite (pull_request) Successful in 15m5s
    Smoke Test / Run basic test suite (pull_request) Successful in 17m54s

d0041bc820  fix test  2025-10-19 19:52:46 +08:00

a96dd88477  misc test improvement  2025-10-19 19:52:33 +08:00

61c18980bc  Add --sync option to deploy create  2025-10-19 14:06:18 +08:00
  To allow updating an existing deployment
  Some checks failed:
    Lint Checks / Run linter (pull_request) Successful in 15m50s
    Deploy Test / Run deploy test suite (pull_request) Failing after 16m46s
    K8s Deploy Test / Run deploy test suite on kind/k8s (pull_request) Successful in 20m47s
    K8s Deployment Control Test / Run deployment control suite on kind/k8s (pull_request) Successful in 20m54s
    Webapp Test / Run webapp test suite (pull_request) Successful in 20m32s
    Smoke Test / Run basic test suite (pull_request) Successful in 21m29s
6 changed files with 108 additions and 14 deletions
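
Taken together, these commits add a --sync flag to the deploy create command so that an existing deployment directory can be refreshed from an updated spec (the spec, stack, compose and pod files are rewritten in place) without recreating the directory or touching its data volumes. A minimal usage sketch, mirroring the invocations added to the test scripts below; the spec file and deployment directory names here are placeholders:

    # First-time creation (unchanged behaviour): the deployment dir must not exist yet
    laconic-so --stack test deploy create --spec-file test-spec.yml --deployment-dir test-deployment-dir

    # After editing the spec: refresh the same deployment in place; files under data/
    # and the cluster ID recorded in deployment.yml are preserved
    laconic-so --stack test deploy create --spec-file test-spec.yml --deployment-dir test-deployment-dir --sync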

File 1 of 6

@@ -2,7 +2,6 @@ version: "1.0"
 name: test
 description: "A test stack"
 repos:
-  - git.vdb.to/cerc-io/laconicd
   - git.vdb.to/cerc-io/test-project@test-branch
 containers:
   - cerc/test-container

File 2 of 6

@@ -443,18 +443,19 @@ def _check_volume_definitions(spec):
 @click.command()
 @click.option("--spec-file", required=True, help="Spec file to use to create this deployment")
 @click.option("--deployment-dir", help="Create deployment files in this directory")
+@click.option("--sync", is_flag=True, default=False, help="Update existing deployment directory without overwriting data volumes")
 # TODO: Hack
 @click.option("--network-dir", help="Network configuration supplied in this directory")
 @click.option("--initial-peers", help="Initial set of persistent peers")
 @click.pass_context
-def create(ctx, spec_file, deployment_dir, network_dir, initial_peers):
+def create(ctx, spec_file, deployment_dir, sync, network_dir, initial_peers):
     deployment_command_context = ctx.obj
-    return create_operation(deployment_command_context, spec_file, deployment_dir, network_dir, initial_peers)
+    return create_operation(deployment_command_context, spec_file, deployment_dir, sync, network_dir, initial_peers)
 
 
 # The init command's implementation is in a separate function so that we can
 # call it from other commands, bypassing the click decoration stuff
-def create_operation(deployment_command_context, spec_file, deployment_dir, network_dir, initial_peers):
+def create_operation(deployment_command_context, spec_file, deployment_dir, sync, network_dir, initial_peers):
     parsed_spec = Spec(os.path.abspath(spec_file), get_parsed_deployment_spec(spec_file))
     _check_volume_definitions(parsed_spec)
     stack_name = parsed_spec["stack"]
@@ -468,12 +469,21 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, network_dir, initial_peers):
     else:
         deployment_dir_path = Path(deployment_dir)
     if deployment_dir_path.exists():
-        error_exit(f"{deployment_dir_path} already exists")
-    os.mkdir(deployment_dir_path)
+        if not sync:
+            error_exit(f"{deployment_dir_path} already exists")
+        if opts.o.debug:
+            print(f"Syncing existing deployment at {deployment_dir_path}")
+    else:
+        if sync:
+            error_exit(f"--sync requires that {deployment_dir_path} already exists")
+        os.mkdir(deployment_dir_path)
     # Copy spec file and the stack file into the deployment dir
     copyfile(spec_file, deployment_dir_path.joinpath(constants.spec_file_name))
     copyfile(stack_file, deployment_dir_path.joinpath(constants.stack_file_name))
-    _create_deployment_file(deployment_dir_path)
+    # Only create deployment file if it doesn't exist (preserve cluster ID on sync)
+    deployment_file_path = deployment_dir_path.joinpath(constants.deployment_file_name)
+    if not deployment_file_path.exists():
+        _create_deployment_file(deployment_dir_path)
     # Copy any config varibles from the spec file into an env file suitable for compose
     _write_config_file(spec_file, deployment_dir_path.joinpath(constants.config_file_name))
     # Copy any k8s config file into the deployment dir
@@ -483,16 +493,16 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, network_dir, initial_peers):
     # Copy the pod files into the deployment dir, fixing up content
     pods = get_pod_list(parsed_stack)
     destination_compose_dir = deployment_dir_path.joinpath("compose")
-    os.mkdir(destination_compose_dir)
+    os.makedirs(destination_compose_dir, exist_ok=True)
     destination_pods_dir = deployment_dir_path.joinpath("pods")
-    os.mkdir(destination_pods_dir)
+    os.makedirs(destination_pods_dir, exist_ok=True)
     yaml = get_yaml()
     for pod in pods:
         pod_file_path = get_pod_file_path(stack_name, parsed_stack, pod)
         parsed_pod_file = yaml.load(open(pod_file_path, "r"))
         extra_config_dirs = _find_extra_config_dirs(parsed_pod_file, pod)
         destination_pod_dir = destination_pods_dir.joinpath(pod)
-        os.mkdir(destination_pod_dir)
+        os.makedirs(destination_pod_dir, exist_ok=True)
         if opts.o.debug:
             print(f"extra config dirs: {extra_config_dirs}")
         _fixup_pod_file(parsed_pod_file, parsed_spec, destination_compose_dir)
@@ -511,7 +521,7 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, network_dir, initial_peers):
         # Copy the script files for the pod, if any
         if pod_has_scripts(parsed_stack, pod):
             destination_script_dir = destination_pod_dir.joinpath("scripts")
-            os.mkdir(destination_script_dir)
+            os.makedirs(destination_script_dir, exist_ok=True)
             script_paths = get_pod_script_paths(parsed_stack, pod)
             _copy_files_to_directory(script_paths, destination_script_dir)
     if parsed_spec.is_kubernetes_deployment():
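
The two new error paths in create_operation are symmetric: without --sync the target directory must not already exist, while --sync requires that it does. A quick sketch of the expected failure modes, with placeholder paths; the messages are the ones passed to error_exit above:

    # Re-running without --sync against an existing directory aborts:
    #   "<deployment-dir> already exists"
    laconic-so --stack test deploy create --spec-file test-spec.yml --deployment-dir existing-deployment-dir

    # Passing --sync when the directory does not yet exist aborts:
    #   "--sync requires that <deployment-dir> already exists"
    laconic-so --stack test deploy create --spec-file test-spec.yml --deployment-dir missing-deployment-dir --sync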

File 3 of 6

@@ -91,6 +91,7 @@ def create_deployment(ctx, deployment_dir, image, url, kube_config, image_regist
         deploy_command_context,
         spec_file_name,
         deployment_dir,
+        False,
         None,
         None
     )

File 4 of 6

@@ -86,7 +86,7 @@ fi
 echo "deploy init test: passed"
 # Switch to a full path for the data dir so it gets provisioned as a host bind mounted volume and preserved beyond cluster lifetime
-sed -i "s|^\(\s*db-data:$\)$|\1 ${test_deployment_dir}/data/db-data|" $test_deployment_spec
+sed -i.bak "s|^\(\s*db-data:$\)$|\1 ${test_deployment_dir}/data/db-data|" $test_deployment_spec
 $TEST_TARGET_SO --stack ${stack} deploy create --spec-file $test_deployment_spec --deployment-dir $test_deployment_dir
 # Check the deployment dir exists
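
The change from sed -i to sed -i.bak in this test makes the in-place edit portable: GNU sed accepts -i with no suffix, BSD/macOS sed requires one, and the attached -i.bak form is accepted by both, at the cost of leaving a .bak backup next to the spec. A small illustration with a placeholder file name:

    # Works with both GNU and BSD/macOS sed; leaves test-spec.yml.bak behind
    sed -i.bak 's/old-value/new-value/' test-spec.yml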

File 5 of 6

@@ -14,8 +14,13 @@ delete_cluster_exit () {
 # Test basic stack-orchestrator deploy
 echo "Running stack-orchestrator deploy test"
 # Bit of a hack, test the most recent package
-TEST_TARGET_SO=$( ls -t1 ./package/laconic-so* | head -1 )
+if [ "$1" == "from-path" ]; then
+  TEST_TARGET_SO="laconic-so"
+else
+  TEST_TARGET_SO=$( ls -t1 ./package/laconic-so* | head -1 )
+fi
 # Set a non-default repo dir
 export CERC_REPO_BASE_DIR=~/stack-orchestrator-test/repo-base-dir
 echo "Testing this package: $TEST_TARGET_SO"
@@ -80,6 +85,7 @@ else
   exit 1
 fi
 $TEST_TARGET_SO --stack test deploy down --delete-volumes
+
 # Basic test of creating a deployment
 test_deployment_dir=$CERC_REPO_BASE_DIR/test-deployment-dir
 test_deployment_spec=$CERC_REPO_BASE_DIR/test-deployment-spec.yml
@@ -117,6 +123,41 @@ fi
 echo "dbfc7a4d-44a7-416d-b5f3-29842cc47650" > $test_deployment_dir/data/test-config/test_config
 echo "deploy create output file test: passed"
+# Test sync functionality: update deployment without destroying data
+# First, create a marker file in the data directory to verify it's preserved
+test_data_marker="$test_deployment_dir/data/test-data-bind/sync-test-marker.txt"
+echo "original-data-$(date +%s)" > "$test_data_marker"
+original_marker_content=$(<$test_data_marker)
+# Also save original spec content to compare
+original_spec_content=$(<$test_deployment_spec)
+# Modify spec file to simulate an update
+sed -i.bak 's/CERC_TEST_PARAM_3:/CERC_TEST_PARAM_3: FASTER/' $test_deployment_spec
+# Run sync to update deployment files without destroying data
+$TEST_TARGET_SO --stack test deploy create --spec-file $test_deployment_spec --deployment-dir $test_deployment_dir --sync
+# Verify the spec file was updated in deployment dir
+updated_deployed_spec=$(<$test_deployment_dir/spec.yml)
+if [[ "$updated_deployed_spec" == *"FASTER"* ]]; then
+  echo "deploy sync test: spec file updated"
+else
+  echo "deploy sync test: spec file not updated - FAILED"
+  exit 1
+fi
+# Verify the data marker file still exists with original content
+if [ ! -f "$test_data_marker" ]; then
+  echo "deploy sync test: data file deleted - FAILED"
+  exit 1
+fi
+synced_marker_content=$(<$test_data_marker)
+if [ "$synced_marker_content" == "$original_marker_content" ]; then
+  echo "deploy sync test: data preserved - passed"
+else
+  echo "deploy sync test: data corrupted - FAILED"
+  exit 1
+fi
+echo "deploy sync test: passed"
 # Try to start the deployment
 $TEST_TARGET_SO deployment --dir $test_deployment_dir start
 # Check logs command works

File 6 of 6

@@ -125,6 +125,49 @@ fi
 echo "dbfc7a4d-44a7-416d-b5f3-29842cc47650" > $test_deployment_dir/data/test-config/test_config
 echo "deploy create output file test: passed"
+# Test sync functionality: update deployment without destroying data
+# First, create a marker file in the data directory to verify it's preserved
+test_data_marker="$test_deployment_dir/data/test-data/sync-test-marker.txt"
+mkdir -p "$test_deployment_dir/data/test-data"
+echo "external-stack-data-$(date +%s)" > "$test_data_marker"
+original_marker_content=$(<$test_data_marker)
+# Verify deployment file exists and preserve its cluster ID
+original_cluster_id=$(grep "cluster-id:" "$test_deployment_dir/deployment.yml" 2>/dev/null || echo "")
+# Modify spec file to simulate an update
+sed -i.bak 's/CERC_TEST_PARAM_1=PASSED/CERC_TEST_PARAM_1=UPDATED/' $test_deployment_spec
+# Run sync to update deployment files without destroying data
+$TEST_TARGET_SO_STACK deploy create --spec-file $test_deployment_spec --deployment-dir $test_deployment_dir --sync
+# Verify the spec file was updated in deployment dir
+updated_deployed_spec=$(<$test_deployment_dir/spec.yml)
+if [[ "$updated_deployed_spec" == *"UPDATED"* ]]; then
+  echo "deploy sync test: spec file updated"
+else
+  echo "deploy sync test: spec file not updated - FAILED"
+  exit 1
+fi
+# Verify the data marker file still exists with original content
+if [ ! -f "$test_data_marker" ]; then
+  echo "deploy sync test: data file deleted - FAILED"
+  exit 1
+fi
+synced_marker_content=$(<$test_data_marker)
+if [ "$synced_marker_content" == "$original_marker_content" ]; then
+  echo "deploy sync test: data preserved - passed"
+else
+  echo "deploy sync test: data corrupted - FAILED"
+  exit 1
+fi
+# Verify cluster ID was preserved (not regenerated)
+new_cluster_id=$(grep "cluster-id:" "$test_deployment_dir/deployment.yml" 2>/dev/null || echo "")
+if [ -n "$original_cluster_id" ] && [ "$original_cluster_id" == "$new_cluster_id" ]; then
+  echo "deploy sync test: cluster ID preserved - passed"
+else
+  echo "deploy sync test: cluster ID not preserved - FAILED"
+  exit 1
+fi
+echo "deploy sync test: passed"
 # Try to start the deployment
 $TEST_TARGET_SO deployment --dir $test_deployment_dir start
 # Check logs command works