Compare commits
No commits in common. "vaasl-deploy" and "main" have entirely different histories.
@@ -26,7 +26,6 @@ from decouple import config
 import subprocess
 import click
 from pathlib import Path
-from typing import Any
 from stack_orchestrator.opts import opts
 from stack_orchestrator.util import include_exclude_check, stack_is_external, error_exit
 from stack_orchestrator.base import get_npm_registry_url
@@ -43,7 +42,7 @@ def make_container_build_env(dev_root_path: str,
                              debug: bool,
                              force_rebuild: bool,
                              extra_build_args: str):
-    command_env: dict[str, Any] = {
+    container_build_env = {
         "CERC_NPM_REGISTRY_URL": get_npm_registry_url(),
         "CERC_GO_AUTH_TOKEN": config("CERC_GO_AUTH_TOKEN", default=""),
         "CERC_NPM_AUTH_TOKEN": config("CERC_NPM_AUTH_TOKEN", default=""),
@@ -53,16 +52,14 @@ def make_container_build_env(dev_root_path: str,
         "CERC_HOST_GID": f"{os.getgid()}",
         "DOCKER_BUILDKIT": config("DOCKER_BUILDKIT", default="0")
     }
-    command_env.update({"CERC_SCRIPT_DEBUG": "true"} if debug else {})
-    command_env.update({"CERC_FORCE_REBUILD": "true"} if force_rebuild else {})
-    command_env.update({"CERC_CONTAINER_EXTRA_BUILD_ARGS": extra_build_args} if extra_build_args else {})
-
-    forwarded_vars = ("DOCKER_HOST", "BUILDKIT_PROGRESS", "http_proxy", "https_proxy")
-    for var in forwarded_vars:
-        if value := config(var, default=None):
-            command_env[var] = value
-
-    return command_env
+    container_build_env.update({"CERC_SCRIPT_DEBUG": "true"} if debug else {})
+    container_build_env.update({"CERC_FORCE_REBUILD": "true"} if force_rebuild else {})
+    container_build_env.update({"CERC_CONTAINER_EXTRA_BUILD_ARGS": extra_build_args} if extra_build_args else {})
+    docker_host_env = os.getenv("DOCKER_HOST")
+    if docker_host_env:
+        container_build_env.update({"DOCKER_HOST": docker_host_env})
+
+    return container_build_env


 def process_container(build_context: BuildContext) -> bool:
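Beyond the rename from command_env to container_build_env, the substantive difference in this file is how host environment reaches the build: the vaasl-deploy side forwards a tuple of variables (DOCKER_HOST, BUILDKIT_PROGRESS, http_proxy, https_proxy) through decouple.config, while main forwards only DOCKER_HOST via os.getenv. A minimal sketch of the loop-based pattern, using plain os.environ in place of decouple.config (an assumption made so the snippet runs without the python-decouple dependency; the real call also consults a .env file):

import os

# Copy selected host environment variables into the build env, but only
# when they are actually set. The walrus operator skips unset/empty vars.
FORWARDED_VARS = ("DOCKER_HOST", "BUILDKIT_PROGRESS", "http_proxy", "https_proxy")

def forward_host_vars(build_env: dict) -> dict:
    for var in FORWARDED_VARS:
        if value := os.environ.get(var):
            build_env[var] = value
    return build_env

if __name__ == "__main__":
    print(forward_host_vars({"DOCKER_BUILDKIT": "0"}))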
@@ -14,6 +14,7 @@
 # along with this program. If not, see <http:#www.gnu.org/licenses/>.
 
 from stack_orchestrator.deploy.deployment_context import DeploymentContext
+from ruamel.yaml import YAML
 
 
 def create(context: DeploymentContext, extra_args):
@@ -22,12 +23,17 @@ def create(context: DeploymentContext, extra_args):
     # deterministic-deployment-proxy contract, which itself is a prereq for Optimism contract deployment
     fixturenet_eth_compose_file = context.deployment_dir.joinpath('compose', 'docker-compose-fixturenet-eth.yml')
 
+    with open(fixturenet_eth_compose_file, 'r') as yaml_file:
+        yaml = YAML()
+        yaml_data = yaml.load(yaml_file)
+
     new_script = '../config/fixturenet-optimism/run-geth.sh:/opt/testnet/run.sh'
 
-    def add_geth_volume(yaml_data):
-        if new_script not in yaml_data['services']['fixturenet-eth-geth-1']['volumes']:
-            yaml_data['services']['fixturenet-eth-geth-1']['volumes'].append(new_script)
+    if new_script not in yaml_data['services']['fixturenet-eth-geth-1']['volumes']:
+        yaml_data['services']['fixturenet-eth-geth-1']['volumes'].append(new_script)
 
-    context.modify_yaml(fixturenet_eth_compose_file, add_geth_volume)
+    with open(fixturenet_eth_compose_file, 'w') as yaml_file:
+        yaml = YAML()
+        yaml.dump(yaml_data, yaml_file)
 
     return None
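Both sides make the same edit to the geth service's volume list; they differ only in mechanism. main performs the ruamel.yaml round-trip inline, while vaasl-deploy delegates to a context.modify_yaml helper (removed from DeploymentContext further down). A self-contained sketch of the inline round-trip, assuming ruamel.yaml is installed; the compose snippet is invented for illustration:

from io import StringIO
from ruamel.yaml import YAML

# Round-trip a compose document: load, append a volume mount if missing,
# dump. ruamel's default round-trip mode preserves comments and key order,
# which matters when editing generated compose files in place.
doc = """\
services:
  fixturenet-eth-geth-1:
    volumes:
      - existing.sh:/opt/testnet/existing.sh
"""

new_script = '../config/fixturenet-optimism/run-geth.sh:/opt/testnet/run.sh'

yaml = YAML()
data = yaml.load(doc)
volumes = data['services']['fixturenet-eth-geth-1']['volumes']
if new_script not in volumes:
    volumes.append(new_script)

out = StringIO()
yaml.dump(data, out)
print(out.getvalue())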
@@ -2,6 +2,7 @@ version: "1.0"
 name: test
 description: "A test stack"
 repos:
+  - git.vdb.to/cerc-io/laconicd
   - git.vdb.to/cerc-io/test-project@test-branch
 containers:
   - cerc/test-container
@@ -45,14 +45,11 @@ class DeploymentContext:
     def get_compose_dir(self):
         return self.deployment_dir.joinpath(constants.compose_dir_name)
 
-    def get_compose_file(self, name: str):
-        return self.get_compose_dir() / f"docker-compose-{name}.yml"
-
     def get_cluster_id(self):
         return self.id
 
-    def init(self, dir: Path):
-        self.deployment_dir = dir.absolute()
+    def init(self, dir):
+        self.deployment_dir = dir
         self.spec = Spec()
         self.spec.init_from_file(self.get_spec_file())
         self.stack = Stack(self.spec.obj["stack"])
@@ -69,19 +66,3 @@ class DeploymentContext:
         unique_cluster_descriptor = f"{path},{self.get_stack_file()},None,None"
         hash = hashlib.md5(unique_cluster_descriptor.encode()).hexdigest()[:16]
         self.id = f"{constants.cluster_name_prefix}{hash}"
-
-    def modify_yaml(self, file_path: Path, modifier_func):
-        """
-        Load a YAML from the deployment, apply a modification function, and write it back.
-        """
-        if not file_path.absolute().is_relative_to(self.deployment_dir):
-            raise ValueError(f"File is not inside deployment directory: {file_path}")
-
-        yaml = get_yaml()
-        with open(file_path, 'r') as f:
-            yaml_data = yaml.load(f)
-
-        modifier_func(yaml_data)
-
-        with open(file_path, 'w') as f:
-            yaml.dump(yaml_data, f)
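The removed modify_yaml helper is the counterpart of the inline YAML handling above: it centralizes the load-modify-dump cycle and refuses to touch files outside the deployment directory. A minimal sketch of that containment guard; note the original uses file_path.absolute(), while resolve() below additionally normalizes symlinks and ".." segments, and Path.is_relative_to requires Python 3.9+:

from pathlib import Path

# Refuse to operate on any file that does not live under the deployment
# directory, guarding against path traversal in caller-supplied paths.
def check_inside(deployment_dir: Path, file_path: Path) -> None:
    if not file_path.resolve().is_relative_to(deployment_dir.resolve()):
        raise ValueError(f"File is not inside deployment directory: {file_path}")

check_inside(Path("/tmp/deploy"), Path("/tmp/deploy/compose/app.yml"))  # ok
# check_inside(Path("/tmp/deploy"), Path("/etc/passwd"))  # raises ValueError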
@@ -422,10 +422,9 @@ def _copy_files_to_directory(file_paths: List[Path], directory: Path):
         copy(path, os.path.join(directory, os.path.basename(path)))
 
 
-def _create_deployment_file(deployment_dir: Path, cluster):
+def _create_deployment_file(deployment_dir: Path):
     deployment_file_path = deployment_dir.joinpath(constants.deployment_file_name)
-    if cluster is None:
-        cluster = f"{constants.cluster_name_prefix}{token_hex(8)}"
+    cluster = f"{constants.cluster_name_prefix}{token_hex(8)}"
     with open(deployment_file_path, "w") as output_file:
         output_file.write(f"{constants.cluster_id_key}: {cluster}\n")
 
@@ -444,16 +443,18 @@ def _check_volume_definitions(spec):
 @click.command()
 @click.option("--spec-file", required=True, help="Spec file to use to create this deployment")
 @click.option("--deployment-dir", help="Create deployment files in this directory")
-@click.argument('extra_args', nargs=-1, type=click.UNPROCESSED)
+# TODO: Hack
+@click.option("--network-dir", help="Network configuration supplied in this directory")
+@click.option("--initial-peers", help="Initial set of persistent peers")
 @click.pass_context
-def create(ctx, spec_file, deployment_dir, extra_args):
+def create(ctx, spec_file, deployment_dir, network_dir, initial_peers):
     deployment_command_context = ctx.obj
-    return create_operation(deployment_command_context, spec_file, deployment_dir, extra_args)
+    return create_operation(deployment_command_context, spec_file, deployment_dir, network_dir, initial_peers)
 
 
 # The init command's implementation is in a separate function so that we can
 # call it from other commands, bypassing the click decoration stuff
-def create_operation(deployment_command_context, spec_file, deployment_dir, extra_args):
+def create_operation(deployment_command_context, spec_file, deployment_dir, network_dir, initial_peers):
     parsed_spec = Spec(os.path.abspath(spec_file), get_parsed_deployment_spec(spec_file))
     _check_volume_definitions(parsed_spec)
     stack_name = parsed_spec["stack"]
@@ -472,7 +473,7 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, extr
     # Copy spec file and the stack file into the deployment dir
     copyfile(spec_file, deployment_dir_path.joinpath(constants.spec_file_name))
     copyfile(stack_file, deployment_dir_path.joinpath(constants.stack_file_name))
-    _create_deployment_file(deployment_dir_path, deployment_command_context.cluster_context.cluster)
+    _create_deployment_file(deployment_dir_path)
     # Copy any config varibles from the spec file into an env file suitable for compose
     _write_config_file(spec_file, deployment_dir_path.joinpath(constants.config_file_name))
     # Copy any k8s config file into the deployment dir
@@ -540,7 +541,7 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, extr
     deployer_config_generator = getDeployerConfigGenerator(deployment_type, deployment_context)
     # TODO: make deployment_dir_path a Path above
     deployer_config_generator.generate(deployment_dir_path)
-    call_stack_deploy_create(deployment_context, extra_args)
+    call_stack_deploy_create(deployment_context, [network_dir, initial_peers, deployment_command_context])
 
 
 # TODO: this code should be in the stack .py files but
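The two create signatures embody different extensibility strategies: a variadic extra_args tail (nargs=-1 with click.UNPROCESSED) lets each stack's create hook define its own arguments, whereas the named --network-dir/--initial-peers options hard-code one stack's needs into the generic command (hence the "# TODO: Hack" comment). A runnable sketch of the passthrough pattern, using a hypothetical command that is not part of the repo:

import click

# ignore_unknown_options lets option-like tokens (e.g. --network-dir=x)
# flow through to the variadic tail instead of being rejected by click.
@click.command(context_settings={"ignore_unknown_options": True})
@click.option("--spec-file", required=True)
@click.argument("extra_args", nargs=-1, type=click.UNPROCESSED)
def create(spec_file, extra_args):
    """Forward unrecognized arguments to per-stack hooks."""
    click.echo(f"spec: {spec_file}, passthrough: {list(extra_args)}")

if __name__ == "__main__":
    create()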
@@ -92,6 +92,7 @@ def create_deployment(ctx, deployment_dir, image, url, kube_config, image_regist
         spec_file_name,
         deployment_dir,
         None,
+        None
     )
     # Fix up the container tag inside the deployment compose file
     _fixup_container_tag(deployment_dir, image)
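This call site simply grows a second positional None to satisfy the five-parameter create_operation. A small illustration (a hypothetical, simplified signature, not the repo's code) of why keyword arguments with defaults keep such call sites readable as parameters accumulate:

# Hypothetical, simplified signature for illustration only.
def create_operation(ctx, spec_file, deployment_dir, network_dir=None, initial_peers=None):
    return (ctx, spec_file, deployment_dir, network_dir, initial_peers)

# A tail of positional Nones (as in the diff) works, but hides which
# parameters are being skipped; defaults and keywords make intent explicit.
print(create_operation("ctx", "spec.yml", "dir"))
print(create_operation("ctx", "spec.yml", "dir", initial_peers="peer1,peer2"))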
@@ -14,13 +14,8 @@ delete_cluster_exit () {
 
 # Test basic stack-orchestrator deploy
 echo "Running stack-orchestrator deploy test"
-
-if [ "$1" == "from-path" ]; then
-    TEST_TARGET_SO="laconic-so"
-else
-    TEST_TARGET_SO=$( ls -t1 ./package/laconic-so* | head -1 )
-fi
-
+# Bit of a hack, test the most recent package
+TEST_TARGET_SO=$( ls -t1 ./package/laconic-so* | head -1 )
 # Set a non-default repo dir
 export CERC_REPO_BASE_DIR=~/stack-orchestrator-test/repo-base-dir
 echo "Testing this package: $TEST_TARGET_SO"