forked from cerc-io/stack-orchestrator

Add deployment scripting (#444)

commit 1f9131ff5a (parent 950857fa84)
@@ -15,7 +15,7 @@

 import os
 from abc import ABC, abstractmethod
-from .deploy import get_stack_status
+from app.deploy import get_stack_status
 from decouple import config

@@ -27,8 +27,8 @@ import subprocess
 import click
 import importlib.resources
 from pathlib import Path
-from .util import include_exclude_check, get_parsed_stack_config
-from .base import get_npm_registry_url
+from app.util import include_exclude_check, get_parsed_stack_config
+from app.base import get_npm_registry_url

 # TODO: find a place for this
 # epilog="Config provided either in .env or settings.ini or env vars: CERC_REPO_BASE_DIR (defaults to ~/cerc)"
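The import changes in this and the following hunks are mechanical: every relative import (from .x import y) becomes an absolute one (from app.x import y). A plausible reason, not stated in the commit, is that absolute imports keep resolving when a module is loaded from a file path rather than through the package, as the dynamic stack-command loading added later in this commit does. Illustrative only:

    # Relative form: resolvable only when the module is imported as part of the package.
    # from .util import include_exclude_check, get_parsed_stack_config
    # Absolute form: also resolvable when the module is executed or loaded by path.
    from app.util import include_exclude_check, get_parsed_stack_config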
@@ -67,7 +67,7 @@ def command(ctx, include, exclude, force_rebuild, extra_build_args):
        print('Dev root directory doesn\'t exist, creating')

    # See: https://stackoverflow.com/a/20885799/1701505
-    from . import data
+    from app import data
    with importlib.resources.open_text(data, "container-image-list.txt") as container_list_file:
        all_containers = container_list_file.read().splitlines()

@@ -25,8 +25,8 @@ from decouple import config
 import click
 import importlib.resources
 from python_on_whales import docker, DockerException
-from .base import get_stack
-from .util import include_exclude_check, get_parsed_stack_config
+from app.base import get_stack
+from app.util import include_exclude_check, get_parsed_stack_config

 builder_js_image_name = "cerc/builder-js:local"

@@ -81,7 +81,7 @@ def command(ctx, include, exclude, force_rebuild, extra_build_args):
        os.makedirs(build_root_path)

    # See: https://stackoverflow.com/a/20885799/1701505
-    from . import data
+    from app import data
    with importlib.resources.open_text(data, "npm-package-list.txt") as package_list_file:
        all_packages = package_list_file.read().splitlines()

@@ -2,7 +2,7 @@ services:
   laconicd:
     restart: no
     image: cerc/laconicd:local
-    command: ["sh", "/docker-entrypoint-scripts.d/create-fixturenet.sh"]
+    command: ["/bin/sh", "-c", "while :; do sleep 600; done"]
     volumes:
       # The cosmos-sdk node's database directory:
       - laconicd-data:/root/.laconicd/data
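The compose change above swaps the fixturenet bootstrap script for an idle loop, so the laconicd container simply stays up and later deployment steps can exec commands into it. A rough sketch of that exec step using python_on_whales; the compose file name follows the pod naming convention used elsewhere in this commit and is an assumption, as is the exact keyword signature:

    from python_on_whales import DockerClient

    # Assumed compose file name for the mainnet-laconicd pod.
    docker = DockerClient(compose_files=["docker-compose-mainnet-laconicd.yml"])
    # Run a one-off command inside the already-running "laconicd" service.
    output = docker.compose.execute("laconicd", ["laconicd", "version"], tty=False)
    print(output)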
@@ -13,45 +13,50 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http:#www.gnu.org/licenses/>.

-import click
-import os
-from shutil import copyfile
-import sys
-from .util import get_stack_config_filename, get_parsed_deployment_spec
+from dataclasses import dataclass
+from app.util import get_yaml
+from app.stack_state import State


-default_spec_file_content = """stack: mainnet-laconic
-data_dir: /my/path
-node_name: my-node-name
+default_spec_file_content = """config:
+  node_moniker: my-node-name
+  chain_id: my-chain-id
 """

+init_help_text = """Add helpful text here on setting config variables.
+"""

-def make_default_deployment_dir():
-    return "deployment-001"
-
-
-@click.command()
-@click.option("--output", required=True, help="Write yaml spec file here")
-@click.pass_context
-def init(ctx, output):
-    with open(output, "w") as output_file:
-        output_file.write(default_spec_file_content)
-
-
-@click.command()
-@click.option("--spec-file", required=True, help="Spec file to use to create this deployment")
-@click.option("--deployment-dir", help="Create deployment files in this directory")
-@click.pass_context
-def create(ctx, spec_file, deployment_dir):
-    # This function fails with a useful error message if the file doesn't exist
-    parsed_spec = get_parsed_deployment_spec(spec_file)
-    if ctx.debug:
-        print(f"parsed spec: {parsed_spec}")
-    if deployment_dir is None:
-        deployment_dir = make_default_deployment_dir()
-    if os.path.exists(deployment_dir):
-        print(f"Error: {deployment_dir} already exists")
-        sys.exit(1)
-    os.mkdir(deployment_dir)
-    # Copy spec file and the stack file into the deployment dir
-    copyfile(spec_file, os.path.join(deployment_dir, os.path.basename(spec_file)))
-    stack_file = get_stack_config_filename(parsed_spec.stack)
-    copyfile(stack_file, os.path.join(deployment_dir, os.path.basename(stack_file)))
+@dataclass
+class VolumeMapping:
+    host_path: str
+    container_path: str
+
+
+# In order to make this, we need the ability to run the stack
+# In theory we can make this same way as we would run deploy up
+def run_container_command(ctx, container, command, mounts):
+    deploy_context = ctx.obj
+    pass
+
+
+def setup(ctx):
+    node_moniker = "dbdb-node"
+    chain_id = "laconic_81337-1"
+    mounts = [
+        VolumeMapping("./path", "~/.laconicd")
+    ]
+    output, status = run_container_command(ctx, "laconicd", f"laconicd init {node_moniker} --chain-id {chain_id}", mounts)
+
+
+def init(command_context):
+    print(init_help_text)
+    yaml = get_yaml()
+    return yaml.load(default_spec_file_content)
+
+
+def get_state(command_context):
+    print("Here we get state")
+    return State.CONFIGURED
+
+
+def change_state(command_context):
+    pass
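The rewritten stack commands module above replaces the old self-contained init/create click commands with plain hook functions (init, setup, get_state, change_state) that the orchestrator calls after loading the file dynamically. A minimal, self-contained sketch of what init() hands back, built from the same default spec text:

    import sys
    from ruamel.yaml import YAML

    # Same shape as default_spec_file_content in the stack's commands.py.
    spec_text = "config:\n  node_moniker: my-node-name\n  chain_id: my-chain-id\n"

    yaml = YAML()
    spec = yaml.load(spec_text)
    print(spec["config"]["chain_id"])   # -> my-chain-id
    yaml.dump(spec, sys.stdout)         # round-trips back to YAML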
@@ -25,7 +25,4 @@ containers:
 pods:
   - mainnet-laconicd
   - fixturenet-laconic-console
-config:
-  cli:
-    key: laconicd.mykey
-    address: laconicd.myaddress
@@ -26,9 +26,10 @@ import subprocess
 from python_on_whales import DockerClient, DockerException
 import click
 from pathlib import Path
-from .util import include_exclude_check, get_parsed_stack_config, global_options2
-from .deployment_create import create as deployment_create
-from .deployment_create import init as deployment_init
+from app.util import include_exclude_check, get_parsed_stack_config, global_options2
+from app.deployment_create import create as deployment_create
+from app.deployment_create import init as deployment_init
+from app.deployment_create import setup as deployment_setup


 class DeployCommandContext(object):
@@ -263,7 +264,7 @@ def _make_cluster_context(ctx, stack, include, exclude, cluster, env_file):
        print(f"Using cluster name: {cluster}")

    # See: https://stackoverflow.com/a/20885799/1701505
-    from . import data
+    from app import data
    with resources.open_text(data, "pod-list.txt") as pod_list_file:
        all_pods = pod_list_file.read().splitlines()

@@ -420,3 +421,4 @@ def _orchestrate_cluster_config(ctx, cluster_config, docker, container_exec_env)

 command.add_command(deployment_init)
 command.add_command(deployment_create)
+command.add_command(deployment_setup)
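For context, the registration above is standard click composition: subcommands defined in app.deployment_create are imported and attached to the existing deploy command group, so init, create and the new setup all dispatch through the same group. A self-contained sketch of the same pattern:

    import click

    @click.group()
    @click.pass_context
    def command(ctx):
        '''deploy a stack'''

    @click.command()
    @click.pass_context
    def setup(ctx):
        print("setup called")

    command.add_command(setup)

    if __name__ == "__main__":
        command()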
@@ -17,8 +17,8 @@ import click
 from dataclasses import dataclass
 from pathlib import Path
 import sys
-from .deploy import up_operation, down_operation, ps_operation, port_operation, exec_operation, logs_operation, create_deploy_context
-from .util import global_options
+from app.deploy import up_operation, down_operation, ps_operation, port_operation, exec_operation, logs_operation, create_deploy_context
+from app.util import global_options


 @dataclass
@@ -128,15 +128,3 @@ def logs(ctx, extra_args):
 @click.pass_context
 def status(ctx):
    print(f"Context: {ctx.parent.obj}")
-
-
-
-#from importlib import resources, util
-# TODO: figure out how to do this dynamically
-#stack = "mainnet-laconic"
-#module_name = "commands"
-#spec = util.spec_from_file_location(module_name, "./app/data/stacks/" + stack + "/deploy/commands.py")
-#imported_stack = util.module_from_spec(spec)
-#spec.loader.exec_module(imported_stack)
-#command.add_command(imported_stack.init)
-#command.add_command(imported_stack.create)
@@ -14,20 +14,12 @@
 # along with this program. If not, see <http:#www.gnu.org/licenses/>.

 import click
+from importlib import util
 import os
 from pathlib import Path
 from shutil import copyfile, copytree
 import sys
-import ruamel.yaml
-from .util import get_stack_file_path, get_parsed_deployment_spec, get_parsed_stack_config, global_options
+from app.util import get_stack_file_path, get_parsed_deployment_spec, get_parsed_stack_config, global_options, get_yaml


-def _get_yaml():
-    # See: https://stackoverflow.com/a/45701840/1701505
-    yaml = ruamel.yaml.YAML()
-    yaml.preserve_quotes = True
-    yaml.indent(sequence=3, offset=1)
-    return yaml
-
-
 def _make_default_deployment_dir():
@@ -47,7 +39,7 @@ def _get_named_volumes(stack):
    named_volumes = []
    parsed_stack = get_parsed_stack_config(stack)
    pods = parsed_stack["pods"]
-    yaml = _get_yaml()
+    yaml = get_yaml()
    for pod in pods:
        pod_file_path = os.path.join(_get_compose_file_dir(), f"docker-compose-{pod}.yml")
        parsed_pod_file = yaml.load(open(pod_file_path, "r"))
@@ -96,6 +88,29 @@ def _fixup_pod_file(pod, spec, compose_dir):
            pod["volumes"][volume] = new_volume_spec


+def call_stack_deploy_init(stack):
+    # Link with the python file in the stack
+    # Call a function in it
+    # If no function found, return None
+    python_file_path = get_stack_file_path(stack).parent.joinpath("deploy", "commands.py")
+    spec = util.spec_from_file_location("commands", python_file_path)
+    imported_stack = util.module_from_spec(spec)
+    spec.loader.exec_module(imported_stack)
+    return imported_stack.init(None)
+
+
+# TODO: fold this with function above
+def call_stack_deploy_setup(stack):
+    # Link with the python file in the stack
+    # Call a function in it
+    # If no function found, return None
+    python_file_path = get_stack_file_path(stack).parent.joinpath("deploy", "commands.py")
+    spec = util.spec_from_file_location("commands", python_file_path)
+    imported_stack = util.module_from_spec(spec)
+    spec.loader.exec_module(imported_stack)
+    return imported_stack.setup(None)
+
+
 # Inspect the pod yaml to find config files referenced in subdirectories
 # other than the one associated with the pod
 def _find_extra_config_dirs(parsed_pod_file, pod):
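call_stack_deploy_init and call_stack_deploy_setup above load the stack's deploy/commands.py by file path and call its hooks. A standalone sketch of the same importlib pattern; the path shown is the mainnet-laconic example referenced elsewhere in this commit:

    from importlib import util

    def load_stack_commands(python_file_path):
        # Build a module object directly from a source file path.
        spec = util.spec_from_file_location("commands", python_file_path)
        module = util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module

    commands = load_stack_commands("app/data/stacks/mainnet-laconic/deploy/commands.py")
    print(commands.init(None))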
@@ -118,17 +133,19 @@ def _find_extra_config_dirs(parsed_pod_file, pod):
 @click.option("--output", required=True, help="Write yaml spec file here")
 @click.pass_context
 def init(ctx, output):
-    yaml = _get_yaml()
+    yaml = get_yaml()
    stack = global_options(ctx).stack
    verbose = global_options(ctx).verbose
+    default_spec_file_content = call_stack_deploy_init(stack)
    spec_file_content = {"stack": stack}
+    spec_file_content.update(default_spec_file_content)
    if verbose:
        print(f"Creating spec file for stack: {stack}")
    named_volumes = _get_named_volumes(stack)
    if named_volumes:
        volume_descriptors = {}
        for named_volume in named_volumes:
-            volume_descriptors[named_volume] = f"../data/{named_volume}"
+            volume_descriptors[named_volume] = f"./data/{named_volume}"
        spec_file_content["volumes"] = volume_descriptors
    with open(output, "w") as output_file:
        yaml.dump(spec_file_content, output_file)
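The two added lines in init() let the stack inject its own defaults into the generated spec. Roughly, assuming the mainnet-laconic defaults shown earlier (the volume name is illustrative):

    spec_file_content = {"stack": "mainnet-laconic"}
    default_spec_file_content = {"config": {"node_moniker": "my-node-name",
                                            "chain_id": "my-chain-id"}}
    spec_file_content.update(default_spec_file_content)
    # Volume paths are now relative to the deployment directory ("./data/...").
    spec_file_content["volumes"] = {"laconicd-data": "./data/laconicd-data"}
    print(spec_file_content)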
@@ -160,7 +177,7 @@ def create(ctx, spec_file, deployment_dir):
    destination_compose_dir = os.path.join(deployment_dir, "compose")
    os.mkdir(destination_compose_dir)
    data_dir = Path(__file__).absolute().parent.joinpath("data")
-    yaml = _get_yaml()
+    yaml = get_yaml()
    for pod in pods:
        pod_file_path = os.path.join(_get_compose_file_dir(), f"docker-compose-{pod}.yml")
        parsed_pod_file = yaml.load(open(pod_file_path, "r"))
@@ -180,3 +197,16 @@ def create(ctx, spec_file, deployment_dir):
        # If the same config dir appears in multiple pods, it may already have been copied
        if not os.path.exists(destination_config_dir):
            copytree(source_config_dir, destination_config_dir)
+
+
+@click.command()
+@click.option("--node-moniker", help="Help goes here")
+@click.option("--key-name", help="Help goes here")
+@click.option("--initialize-network", is_flag=True, default=False, help="Help goes here")
+@click.option("--join-network", is_flag=True, default=False, help="Help goes here")
+@click.option("--create-network", is_flag=True, default=False, help="Help goes here")
+@click.pass_context
+def setup(ctx, node_moniker, key_name, initialize_network, join_network, create_network):
+    stack = global_options(ctx).stack
+    call_stack_deploy_setup(stack)
@@ -25,7 +25,7 @@ import click
 import importlib.resources
 from pathlib import Path
 import yaml
-from .util import include_exclude_check
+from app.util import include_exclude_check


 class GitProgress(git.RemoteProgress):
@@ -227,7 +227,7 @@ def command(ctx, include, exclude, git_ssh, check_only, pull, branches, branches
        os.makedirs(dev_root_path)

    # See: https://stackoverflow.com/a/20885799/1701505
-    from . import data
+    from app import data
    with importlib.resources.open_text(data, "repository-list.txt") as repository_list_file:
        all_repos = repository_list_file.read().splitlines()

app/stack_state.py (new file)
@@ -0,0 +1,22 @@
+# Copyright © 2023 Cerc
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http:#www.gnu.org/licenses/>.
+
+from enum import Enum
+
+class State(Enum):
+    CREATED = 1
+    CONFIGURED = 2
+    STARTED = 3
+    STOPPED = 4
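The new State enum gives stack command modules a shared vocabulary for reporting deployment lifecycle state, as get_state() in the mainnet-laconic commands module does. A small hypothetical usage:

    from app.stack_state import State

    def describe(state: State) -> str:
        return f"deployment is {state.name.lower()}"

    print(describe(State.CONFIGURED))   # -> deployment is configured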
app/util.py
@@ -15,7 +15,7 @@

 import os.path
 import sys
-import yaml
+import ruamel.yaml
 from pathlib import Path

@@ -42,7 +42,7 @@ def get_parsed_stack_config(stack):
    stack_file_path = stack if isinstance(stack, os.PathLike) else get_stack_file_path(stack)
    try:
        with stack_file_path:
-            stack_config = yaml.safe_load(open(stack_file_path, "r"))
+            stack_config = get_yaml().load(open(stack_file_path, "r"))
            return stack_config
    except FileNotFoundError as error:
        # We try here to generate a useful diagnostic error
@@ -60,7 +60,7 @@ def get_parsed_deployment_spec(spec_file):
    spec_file_path = Path(spec_file)
    try:
        with spec_file_path:
-            deploy_spec = yaml.safe_load(open(spec_file_path, "r"))
+            deploy_spec = get_yaml().load(open(spec_file_path, "r"))
            return deploy_spec
    except FileNotFoundError as error:
        # We try here to generate a useful diagnostic error
@@ -69,6 +69,14 @@ def get_parsed_deployment_spec(spec_file):
        sys.exit(1)


+def get_yaml():
+    # See: https://stackoverflow.com/a/45701840/1701505
+    yaml = ruamel.yaml.YAML()
+    yaml.preserve_quotes = True
+    yaml.indent(sequence=3, offset=1)
+    return yaml
+
+
 # TODO: this is fragile wrt to the subcommand depth
 # See also: https://github.com/pallets/click/issues/108
 def global_options(ctx):
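The new get_yaml() helper replaces yaml.safe_load with a ruamel.yaml round-trip instance, so quoting and indentation survive a load/modify/dump cycle when spec and compose files are rewritten. A short sketch (the file name is hypothetical):

    import sys
    from app.util import get_yaml

    yaml = get_yaml()
    with open("spec.yml", "r") as spec_file:
        spec = yaml.load(spec_file)
    spec["volumes"] = {"laconicd-data": "./data/laconicd-data"}
    yaml.dump(spec, sys.stdout)   # formatting of untouched lines is preserved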
@@ -22,7 +22,7 @@ def command(ctx):
    '''print tool version'''

    # See: https://stackoverflow.com/a/20885799/1701505
-    from . import data
+    from app import data
    with importlib.resources.open_text(data, "build_tag.txt") as version_file:
        # TODO: code better version that skips comment lines
        version_string = version_file.read().splitlines()[1]