2023-09-10 19:28:26 +00:00
|
|
|
# Copyright © 2022, 2023 Vulcanize
|
2023-01-04 23:16:40 +00:00
|
|
|
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU Affero General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU Affero General Public License for more details.
|
|
|
|
|
|
|
|
# You should have received a copy of the GNU Affero General Public License
|
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
|
|
|
# Builds or pulls containers for the system components
|
|
|
|
|
|
|
|
# env vars:
|
|
|
|
# CERC_REPO_BASE_DIR defaults to ~/cerc
|
|
|
|
|
|
|
|
import os
|
2023-01-19 20:38:04 +00:00
|
|
|
import sys
|
2023-02-21 13:56:33 +00:00
|
|
|
from shutil import rmtree, copytree
|
2023-01-04 23:16:40 +00:00
|
|
|
from decouple import config
|
|
|
|
import click
|
2023-01-12 04:56:05 +00:00
|
|
|
import importlib.resources
|
2023-01-19 20:00:35 +00:00
|
|
|
from python_on_whales import docker, DockerException
|
2023-11-07 07:06:55 +00:00
|
|
|
from stack_orchestrator.base import get_stack
|
|
|
|
from stack_orchestrator.util import include_exclude_check, get_parsed_stack_config
|
2023-01-04 23:16:40 +00:00
|
|
|
|
2023-02-20 23:16:15 +00:00
|
|
|
# Tag of the locally-built JS builder container image used to run npm package builds
builder_js_image_name = "cerc/builder-js:local"
|
|
|
|
|
2023-09-04 19:00:23 +00:00
|
|
|
|
2023-01-04 23:16:40 +00:00
|
|
|
@click.command()
@click.option('--include', help="only build these packages")
@click.option('--exclude', help="don\'t build these packages")
@click.option("--force-rebuild", is_flag=True, default=False,
              help="Override existing target package version check -- force rebuild")
@click.option("--extra-build-args", help="Supply extra arguments to build")
@click.pass_context
def command(ctx, include, exclude, force_rebuild, extra_build_args):
    '''build the set of npm packages required for a complete stack'''

    # Global options set on the top-level command, carried in the click context
    quiet = ctx.obj.quiet
    verbose = ctx.obj.verbose
    dry_run = ctx.obj.dry_run
    local_stack = ctx.obj.local_stack
    debug = ctx.obj.debug
    stack = ctx.obj.stack
    continue_on_error = ctx.obj.continue_on_error

    # Exits if the cerc/builder-js image is not present locally
    _ensure_prerequisites()

    # build-npms depends on having access to a writable package registry
    # so we check here that it is available
    package_registry_stack = get_stack(ctx.obj, "package-registry")
    registry_available = package_registry_stack.ensure_available()
    if not registry_available:
        print("FATAL: no npm registry available for build-npms command")
        sys.exit(1)
    npm_registry_url = package_registry_stack.get_url()
    # Auth token is required to publish to the registry; fail fast if absent
    npm_registry_url_token = config("CERC_NPM_AUTH_TOKEN", default=None)
    if not npm_registry_url_token:
        print("FATAL: CERC_NPM_AUTH_TOKEN is not defined")
        sys.exit(1)

    if local_stack:
        # In local-stack mode the repo base dir is derived from the current checkout path
        dev_root_path = os.getcwd()[0:os.getcwd().rindex("stack-orchestrator")]
        print(f'Local stack dev_root_path (CERC_REPO_BASE_DIR) overridden to: {dev_root_path}')
    else:
        dev_root_path = os.path.expanduser(config("CERC_REPO_BASE_DIR", default="~/cerc"))

    # Working copies of the repos are built under here, not in the cloned repos themselves
    build_root_path = os.path.join(dev_root_path, "build-trees")

    if verbose:
        print(f'Dev Root is: {dev_root_path}')

    if not os.path.isdir(dev_root_path):
        print('Dev root directory doesn\'t exist, creating')
        os.makedirs(dev_root_path)
    # Fix: previously this re-checked dev_root_path, so build_root_path was
    # never created when the dev root already existed
    if not os.path.isdir(build_root_path):
        print('Build root directory doesn\'t exist, creating')
        os.makedirs(build_root_path)

    # See: https://stackoverflow.com/a/20885799/1701505
    from stack_orchestrator import data
    with importlib.resources.open_text(data, "npm-package-list.txt") as package_list_file:
        all_packages = package_list_file.read().splitlines()

    packages_in_scope = []
    if stack:
        stack_config = get_parsed_stack_config(stack)
        # TODO: syntax check the input here
        packages_in_scope = stack_config['npms']
    else:
        packages_in_scope = all_packages

    if verbose:
        print(f'Packages: {packages_in_scope}')

    def build_package(package):
        # Build a single npm package inside the builder-js container,
        # publishing to the registry at npm_registry_url
        if not quiet:
            print(f"Building npm package: {package}")
        repo_dir = package
        repo_full_path = os.path.join(dev_root_path, repo_dir)
        # Copy the repo and build that to avoid propagating JS tooling file changes back into the cloned repo
        repo_copy_path = os.path.join(build_root_path, repo_dir)
        # First delete any old build tree
        if os.path.isdir(repo_copy_path):
            if verbose:
                print(f"Deleting old build tree: {repo_copy_path}")
            if not dry_run:
                rmtree(repo_copy_path)
        # Now copy the repo into the build tree location
        if verbose:
            print(f"Copying build tree from: {repo_full_path} to: {repo_copy_path}")
        if not dry_run:
            copytree(repo_full_path, repo_copy_path)
        build_command = ["sh", "-c", f"cd /workspace && build-npm-package-local-dependencies.sh {npm_registry_url}"]
        if not dry_run:
            if verbose:
                print(f"Executing: {build_command}")
            # Originally we used the PEP 584 merge operator:
            # envs = {"CERC_NPM_AUTH_TOKEN": npm_registry_url_token} | ({"CERC_SCRIPT_DEBUG": "true"} if debug else {})
            # but that isn't available in Python 3.8 (default in Ubuntu 20) so for now we use dict.update:
            envs = {"CERC_NPM_AUTH_TOKEN": npm_registry_url_token,
                    "LACONIC_HOSTED_CONFIG_FILE": "config-hosted.yml"  # Convention used by our web app packages
                    }
            envs.update({"CERC_SCRIPT_DEBUG": "true"} if debug else {})
            envs.update({"CERC_FORCE_REBUILD": "true"} if force_rebuild else {})
            envs.update({"CERC_CONTAINER_EXTRA_BUILD_ARGS": extra_build_args} if extra_build_args else {})
            try:
                docker.run(builder_js_image_name,
                           remove=True,
                           interactive=True,
                           tty=True,
                           # Run as the invoking user so build output files aren't owned by root
                           user=f"{os.getuid()}:{os.getgid()}",
                           envs=envs,
                           # TODO: detect this host name in npm_registry_url rather than hard-wiring it
                           add_hosts=[("gitea.local", "host-gateway")],
                           volumes=[(repo_copy_path, "/workspace")],
                           command=build_command
                           )
                # Note that although the docs say that build_result should contain
                # the command output as a string, in reality it is always the empty string.
                # Since we detect errors via catching exceptions below, we can safely ignore it here.
            except DockerException as e:
                print(f"Error executing build for {package} in container:\n {e}")
                if not continue_on_error:
                    print("FATAL Error: build failed and --continue-on-error not set, exiting")
                    sys.exit(1)
                else:
                    print("****** Build Error, continuing because --continue-on-error is set")

        else:
            print("Skipped")

    for package in packages_in_scope:
        if include_exclude_check(package, include, exclude):
            build_package(package)
        else:
            if verbose:
                print(f"Excluding: {package}")
|
2023-02-20 23:16:15 +00:00
|
|
|
|
|
|
|
|
|
|
|
def _ensure_prerequisites():
    """Exit with guidance if the local builder-js container image is missing.

    The build runs inside the cerc/builder-js image, so its absence is fatal;
    we check up front and tell the user how to create it.
    """
    matching_images = docker.image.list(builder_js_image_name)
    if not matching_images:
        print(f"FATAL: builder image: {builder_js_image_name} is required but was not found")
        print("Please run this command to create it: laconic-so --stack build-support build-containers")
        sys.exit(1)
|